Upload llmdolphin.py

llmdolphin.py CHANGED (+10 -1)
@@ -23,9 +23,18 @@ llm_models = {
     "Nemo-12B-Marlin-v3.Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v3-GGUF", MessagesFormatterType.MISTRAL],
     "Nemo-12B-Marlin-v4.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v4-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Nemo-12B-Marlin-v5-Q4_K_M.gguf": ["starble-dev/Nemo-12B-Marlin-v5-GGUF", MessagesFormatterType.CHATML],
+    "NemoDori-v0.2-Upscaled.1-14B.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2-Upscaled.1-14B-GGUF", MessagesFormatterType.MISTRAL],
+    "L3-Boshima-a.Q5_K_M.gguf": ["mradermacher/L3-Boshima-a-GGUF", MessagesFormatterType.LLAMA_3],
+    "canidori-12b-v1-q5_k_m.gguf": ["NGalrion/Canidori-12B-v1-Q5_K_M-GGUF", MessagesFormatterType.MISTRAL],
+    "MN-12B-Estrella-v1.Q4_K_S.gguf": ["mradermacher/MN-12B-Estrella-v1-GGUF", MessagesFormatterType.CHATML],
+    "gemmaomni2-2b-q5_k_m.gguf": ["bunnycore/GemmaOmni2-2B-Q5_K_M-GGUF", MessagesFormatterType.ALPACA],
     "MN-LooseCannon-12B-v1.Q4_K_M.gguf": ["mradermacher/MN-LooseCannon-12B-v1-GGUF", MessagesFormatterType.CHATML],
-    "
+    "Pleiades-12B-v1.Q4_K_M.gguf": ["mradermacher/Pleiades-12B-v1-GGUF", MessagesFormatterType.CHATML],
+    "mistral-nemo-gutenberg-12B.Q4_K_S.gguf": ["mradermacher/mistral-nemo-gutenberg-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "gemma2-gutenberg-9B.Q4_K_M.gguf": ["mradermacher/gemma2-gutenberg-9B-GGUF", MessagesFormatterType.ALPACA],
+    "NemoDori-v0.5-12B-MN-BT.i1-Q4_K_M.gguf": ["mradermacher/NemoDori-v0.5-12B-MN-BT-i1-GGUF", MessagesFormatterType.MISTRAL],
     "NemoDori-v0.2.1-12B-MN-BT.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2.1-12B-MN-BT-GGUF", MessagesFormatterType.MISTRAL],
+    "NemoDori-v0.2.2-12B-MN-ties.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2.2-12B-MN-ties-GGUF", MessagesFormatterType.MISTRAL],
     "Mini-Magnum-Unboxed-12B-Q4_K_M.gguf": ["concedo/Mini-Magnum-Unboxed-12B-GGUF", MessagesFormatterType.ALPACA],
     "L3.1-Siithamo-v0.1-8B.i1-Q5_K_M.gguf": ["mradermacher/L3.1-Siithamo-v0.1-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
     "L3.1-Siithamo-v0.2-8B.i1-Q5_K_M.gguf": ["mradermacher/L3.1-Siithamo-v0.2-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
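
For context, each entry in llm_models maps a GGUF filename to a Hub repo id and the chat-template enum (MessagesFormatterType) that model expects. The sketch below is a minimal, hypothetical illustration of how such an entry might be consumed: download the registered file, load it, and keep the formatter for prompting. It assumes huggingface_hub, llama-cpp-python, and llama-cpp-agent are installed; the helper name load_dolphin_model and the n_ctx value are illustrative and not taken from llmdolphin.py:

    # Hypothetical consumer of an llm_models entry (not part of this commit):
    # fetch the registered GGUF from the Hub and load it, keeping the
    # MessagesFormatterType so the right chat template can be applied later.
    from huggingface_hub import hf_hub_download
    from llama_cpp import Llama
    from llama_cpp_agent import MessagesFormatterType

    llm_models = {
        "Nemo-12B-Marlin-v5-Q4_K_M.gguf": ["starble-dev/Nemo-12B-Marlin-v5-GGUF", MessagesFormatterType.CHATML],
        "L3-Boshima-a.Q5_K_M.gguf": ["mradermacher/L3-Boshima-a-GGUF", MessagesFormatterType.LLAMA_3],
    }

    def load_dolphin_model(filename: str):
        """Return (Llama instance, formatter enum) for a registered GGUF filename."""
        repo_id, formatter = llm_models[filename]
        model_path = hf_hub_download(repo_id=repo_id, filename=filename)  # cached locally by huggingface_hub
        llm = Llama(model_path=model_path, n_ctx=4096)  # n_ctx is an arbitrary example value
        return llm, formatter

    llm, formatter = load_dolphin_model("L3-Boshima-a.Q5_K_M.gguf")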