Zwea Htet committed
Commit b7600b8
1 Parent(s): 5ef1e14

update llamaCustom

Files changed (1)
  1. models/llamaCustom.py +9 -6
models/llamaCustom.py CHANGED
@@ -20,10 +20,11 @@ from llama_index import (
     StorageContext,
     load_index_from_storage,
 )
+from pydantic import BaseModel
 from llama_index.llms.base import llm_completion_callback
 from llama_index.llms import CompletionResponse, CustomLLM, LLMMetadata
 from llama_index.prompts import Prompt
-from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, Pipeline
 
 from assets.prompts import custom_prompts
 
@@ -65,11 +66,13 @@ def load_model(model_name: str):
     return pipe
 
 
-class OurLLM(CustomLLM):
-    def __init__(self, model_name: str, pipeline):
-        super().__init__()  # Call the __init__ method of CustomLLM
-        self.model_name = model_name
-        self.pipeline = pipeline
+class OurLLM(CustomLLM, BaseModel):
+    # def __init__(self, model_name: str, pipeline):
+    #     super().__init__()  # Call the __init__ method of CustomLLM
+    #     self.model_name = model_name
+    #     self.pipeline = pipeline
+    model_name: str = ""
+    pipeline: Pipeline = None
 
     @property
     def metadata(self) -> LLMMetadata:
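The refactor declares model_name and pipeline as pydantic fields on OurLLM, presumably because CustomLLM in llama_index is pydantic-based and rejects instance attributes assigned in a hand-written __init__. Instances are therefore built with keyword arguments. A minimal usage sketch (not part of the commit; the "gpt2" model name is illustrative only, and load_model is the helper defined earlier in models/llamaCustom.py):

    pipe = load_model("gpt2")         # illustrative model name; returns a transformers Pipeline
    llm = OurLLM(model_name="gpt2",   # fields are assigned and validated by pydantic,
                 pipeline=pipe)       # replacing the commented-out custom __init__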