clementsan committed
Commit e27125d • 1 Parent(s): fe331ff
Add TinyLlama-1.1B-Chat-v1.0 model
app.py CHANGED
@@ -24,10 +24,11 @@ llm_name0 = "mistralai/Mixtral-8x7B-Instruct-v0.1"
 llm_name1 = "mistralai/Mistral-7B-Instruct-v0.2"
 llm_name2 = "mistralai/Mistral-7B-Instruct-v0.1"
 llm_name3 = "meta-llama/Llama-2-7b-chat-hf"
-llm_name4 = "microsoft/phi-2"
-llm_name5 = "mosaicml/mpt-7b-instruct"
-llm_name6 = "tiiuae/falcon-7b-instruct"
-llm_name7 = "google/flan-t5-xxl"
+llm_name4 = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
+llm_name5 = "microsoft/phi-2"
+llm_name6 = "mosaicml/mpt-7b-instruct"
+llm_name7 = "tiiuae/falcon-7b-instruct"
+llm_name8 = "google/flan-t5-xxl"
 list_llm = [llm_name0, llm_name1, llm_name2, llm_name3, llm_name4, llm_name5, llm_name6, llm_name7]
 list_llm_simple = [os.path.basename(llm) for llm in list_llm]
 
@@ -106,6 +107,11 @@ def initialize_llmchain(llm_model, temperature, max_tokens, top_k, vector_db, pr
             repo_id=llm_model,
             model_kwargs={"temperature": temperature, "max_new_tokens": max_tokens, "top_k": top_k, "trust_remote_code": True, "torch_dtype": "auto"}
         )
+    elif llm_model == "TinyLlama/TinyLlama-1.1B-Chat-v1.0":
+        llm = HuggingFaceHub(
+            repo_id=llm_model,
+            model_kwargs={"temperature": temperature, "max_new_tokens": 256, "top_k": top_k, "torch_dtype": "auto"}
+        )
     else:
         llm = HuggingFaceHub(
             repo_id=llm_model,
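
For reference, the new branch wires TinyLlama-1.1B-Chat-v1.0 through the same LangChain HuggingFaceHub wrapper as the other models, but caps generation at 256 new tokens for the 1.1B model. Below is a minimal standalone sketch of that call; the import path, the placeholder sampling values, and the HUGGINGFACEHUB_API_TOKEN check are assumptions for illustration, not part of this commit.

# Minimal sketch of the TinyLlama branch added above.
# Assumes the legacy langchain.llms.HuggingFaceHub wrapper and a Hugging Face
# API token in the environment (both assumptions, not shown in the diff).
import os

from langchain.llms import HuggingFaceHub

assert "HUGGINGFACEHUB_API_TOKEN" in os.environ, "set a Hugging Face API token first"

llm_model = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"

llm = HuggingFaceHub(
    repo_id=llm_model,
    # max_new_tokens is capped at 256 for the 1.1B model, mirroring the commit;
    # temperature and top_k are placeholder values standing in for the Gradio sliders.
    model_kwargs={"temperature": 0.7, "max_new_tokens": 256, "top_k": 3},
)

# Direct call works on older LangChain releases; use llm.invoke(...) on newer ones.
print(llm("Summarize retrieval-augmented generation in one sentence."))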