Update app.py
app.py (CHANGED)
@@ -23,14 +23,18 @@ client = OpenAI(
 
 # Create supported models
 model_links = {
+
+    "Meta-Llama-3.1-405B-Instruct": "meta-llama/Meta-Llama-3.1-405B-Instruct",
+    "Meta-Llama-3.1-405B-Instruct-FP8": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
+    "Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct",
     "Meta-Llama-3-70B-Instruct": "meta-llama/Meta-Llama-3-70B-Instruct",
+    "Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct",
     "C4ai-command-r-plus": "CohereForAI/c4ai-command-r-plus",
     "Aya-23-35B": "CohereForAI/aya-23-35B",
     "Zephyr-orpo-141b-A35b-v0.1": "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
     "Mixtral-8x7B-Instruct-v0.1": "mistralai/Mixtral-8x7B-Instruct-v0.1",
     "Codestral-22B-v0.1": "mistralai/Codestral-22B-v0.1",
     "Nous-Hermes-2-Mixtral-8x7B-DPO": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-    "Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct",
     "Yi-1.5-34B-Chat": "01-ai/Yi-1.5-34B-Chat",
     "Gemma-2-27b-it": "google/gemma-2-27b-it",
     "Meta-Llama-2-70B-Chat-HF": "meta-llama/Llama-2-70b-chat-hf",
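For context, the keys in model_links are the display names offered in the app's model picker, while the values are Hugging Face repo ids passed as the model argument to the OpenAI-compatible client created above the hunk. The sketch below shows that lookup-and-call flow as a minimal, assumption-laden illustration, not the Space's actual code: the base_url, the HF_TOKEN environment variable, and the stream_reply helper are hypothetical names chosen for the example.

```python
# Minimal sketch: resolve a display name from model_links to a repo id and
# stream a chat completion through an OpenAI-compatible endpoint.
# Assumptions: the openai Python client (>=1.0) is used, the endpoint URL and
# HF_TOKEN variable are illustrative, and stream_reply is not from the app.
import os

from openai import OpenAI

client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1/",  # assumed endpoint
    api_key=os.environ.get("HF_TOKEN", ""),               # assumed env var
)

model_links = {
    "Meta-Llama-3.1-405B-Instruct": "meta-llama/Meta-Llama-3.1-405B-Instruct",
    "Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct",
}


def stream_reply(display_name: str, messages: list[dict]) -> str:
    """Look up the repo id for the selected display name and stream a reply."""
    repo_id = model_links[display_name]
    stream = client.chat.completions.create(
        model=repo_id,
        messages=messages,
        stream=True,
    )
    # Concatenate streamed deltas; content may be None on some chunks.
    parts = []
    for chunk in stream:
        parts.append(chunk.choices[0].delta.content or "")
    return "".join(parts)


if __name__ == "__main__":
    print(stream_reply(
        "Meta-Llama-3.1-8B-Instruct",
        [{"role": "user", "content": "Hello!"}],
    ))
```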