Upload llmdolphin.py

llmdolphin.py CHANGED (+10 -0)
@@ -25,7 +25,17 @@ llm_models = {
     "Nemo-12B-Marlin-v5-Q4_K_M.gguf": ["starble-dev/Nemo-12B-Marlin-v5-GGUF", MessagesFormatterType.CHATML],
     "NemoDori-v0.2-Upscaled.1-14B.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2-Upscaled.1-14B-GGUF", MessagesFormatterType.MISTRAL],
     "NemoReRemix-12B-Q4_K_M.gguf": ["bartowski/NemoReRemix-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Aura-NeMo-12B-Q4_K_L-imat.gguf": ["Reiterate3680/Aura-NeMo-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "TypeII-12B.Q4_K_S.gguf": ["mradermacher/TypeII-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Peach-9B-8k-Roleplay-Q4_K_M.gguf": ["bartowski/Peach-9B-8k-Roleplay-GGUF", MessagesFormatterType.LLAMA_3],
+    "heartstolen_model-stock_8b-q4_k_m.gguf": ["DreadPoor/HeartStolen_model-stock_8B-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3],
     "Rocinante-12B-v1b-Q4_K_M.gguf": ["BeaverAI/Rocinante-12B-v1b-GGUF", MessagesFormatterType.CHATML],
+    "Llama-3.1-8B-ArliAI-Formax-v1.0-Q5_K_M.gguf": ["ArliAI/Llama-3.1-8B-ArliAI-Formax-v1.0-GGUF", MessagesFormatterType.MISTRAL],
+    "ArliAI-Llama-3-8B-Formax-v1.0-Q5_K_M.gguf": ["ArliAI/ArliAI-Llama-3-8B-Formax-v1.0-GGUF", MessagesFormatterType.LLAMA_3],
+    "badger-writer-llama-3-8b-q4_k_m.gguf": ["A2va/badger-writer-llama-3-8b-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3],
+    "magnum-12b-v2.5-kto-Q4_K_L-imat.gguf": ["Reiterate3680/magnum-12b-v2.5-kto-GGUF", MessagesFormatterType.CHATML],
+    "CeleMo-Instruct-128k.Q4_K_S.gguf": ["mradermacher/CeleMo-Instruct-128k-GGUF", MessagesFormatterType.CHATML],
+    "KukulStanta-7B-Seamaiiza-7B-v1-slerp-merge.q3_k_l.gguf": ["AlekseiPravdin/KukulStanta-7B-Seamaiiza-7B-v1-slerp-merge-gguf", MessagesFormatterType.MISTRAL],
     "HolyNemo-12B.Q4_K_M.gguf": ["mradermacher/HolyNemo-12B-GGUF", MessagesFormatterType.MISTRAL],
     "mistral-nemo-gutenberg-12B-v2.Q4_K_M.gguf": ["mradermacher/mistral-nemo-gutenberg-12B-v2-GGUF", MessagesFormatterType.MISTRAL],
     "KukulStanta-InfinityRP-7B-slerp.Q5_K_M.gguf": ["mradermacher/KukulStanta-InfinityRP-7B-slerp-GGUF", MessagesFormatterType.MISTRAL],
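
Each entry in llm_models maps a GGUF filename to its source Hugging Face repo and the llama-cpp-agent chat formatter to pair with it. A minimal sketch of how one of the newly added entries could be resolved at load time, assuming the space downloads the file from the listed repo via huggingface_hub; the resolve_model helper below is hypothetical, not code from llmdolphin.py:

# Sketch only: assumes each GGUF is fetched from the repo named in llm_models
# and paired with its llama-cpp-agent formatter. resolve_model() is a
# hypothetical helper for illustration, not part of llmdolphin.py.
from huggingface_hub import hf_hub_download
from llama_cpp_agent import MessagesFormatterType

llm_models = {
    "Aura-NeMo-12B-Q4_K_L-imat.gguf": ["Reiterate3680/Aura-NeMo-12B-GGUF", MessagesFormatterType.MISTRAL],
    "Peach-9B-8k-Roleplay-Q4_K_M.gguf": ["bartowski/Peach-9B-8k-Roleplay-GGUF", MessagesFormatterType.LLAMA_3],
}

def resolve_model(filename: str):
    """Return (local GGUF path, chat formatter) for a registered model."""
    repo_id, formatter = llm_models[filename]
    local_path = hf_hub_download(repo_id=repo_id, filename=filename)
    return local_path, formatter

# Usage: fetch the model file and the prompt format to use with it.
path, fmt = resolve_model("Aura-NeMo-12B-Q4_K_L-imat.gguf")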