Upload llmdolphin.py
llmdolphin.py CHANGED (+30 -0)
@@ -59,11 +59,41 @@ llm_models = {
     "Qwen2.5-14B_Uncensored_Instruct.Q4_K_M.gguf": ["mradermacher/Qwen2.5-14B_Uncensored_Instruct-GGUF", MessagesFormatterType.OPEN_CHAT],
     "EVA-Qwen2.5-14B-v0.0.i1-IQ4_XS.gguf": ["mradermacher/EVA-Qwen2.5-14B-v0.0-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
     "MN-12B-Vespa-x1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Vespa-x1-i1-GGUF", MessagesFormatterType.CHATML],
+    "Mistral-Nemo-12B-ArliAI-RPMax-v1.1.i1-Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-12B-ArliAI-RPMax-v1.1-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Trinas_Nectar-8B-model_stock.i1-Q4_K_M.gguf": ["mradermacher/Trinas_Nectar-8B-model_stock-i1-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_12B_v2.0.Q5_K_M.gguf": ["mradermacher/ChatWaifu_12B_v2.0-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_22B_v2.0_preview.Q4_K_S.gguf": ["mradermacher/ChatWaifu_22B_v2.0_preview-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_v1.4.Q5_K_M.gguf": ["mradermacher/ChatWaifu_v1.4-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_v1.3.1.Q4_K_M.gguf": ["mradermacher/ChatWaifu_v1.3.1-GGUF", MessagesFormatterType.MISTRAL],
+    "GEMMA2-9b-Pollux-exp.Q4_K_M.gguf": ["mradermacher/GEMMA2-9b-Pollux-exp-GGUF", MessagesFormatterType.ALPACA],
+    "Gemma-2-Ataraxy-v4a-Advanced-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v4a-Advanced-9B-GGUF", MessagesFormatterType.ALPACA],
+    "llama-3.1-8b-titanfusion-mix-2.1-q4_k_m-imat.gguf": ["bunnycore/Llama-3.1-8B-TitanFusion-Mix-2.1-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3],
+    "Gemma-2-Ataraxy-v4-Advanced-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v4-Advanced-9B-GGUF", MessagesFormatterType.ALPACA],
+    "Gemma-2-9B-ArliAI-RPMax-v1.1.i1-Q4_K_S.gguf": ["mradermacher/Gemma-2-9B-ArliAI-RPMax-v1.1-i1-GGUF", MessagesFormatterType.ALPACA],
+    "SuperNeuralDreadDevil-8b.Q5_K_M.gguf": ["mradermacher/SuperNeuralDreadDevil-8b-GGUF", MessagesFormatterType.LLAMA_3],
+    "astral-fusion-neural-happy-l3.1-8b-q4_0.gguf": ["ZeroXClem/Astral-Fusion-Neural-Happy-L3.1-8B-Q4_0-GGUF", MessagesFormatterType.LLAMA_3],
+    "LexiMaid-L3-8B.Q5_K_M.gguf": ["mradermacher/LexiMaid-L3-8B-GGUF", MessagesFormatterType.LLAMA_3],
+    "ModeliCo-8B.i1-Q5_K_M.gguf": ["mradermacher/ModeliCo-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Llama3-8B-function-calling-dpo-slerp.i1-Q5_K_M.gguf": ["mradermacher/Llama3-8B-function-calling-dpo-slerp-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Aspire1.2-8B-TIES.i1-Q5_K_M.gguf": ["mradermacher/Aspire1.2-8B-TIES-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Rombos-LLM-V2.6-Qwen-14b.Q4_K_M.gguf": ["mradermacher/Rombos-LLM-V2.6-Qwen-14b-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Moonlight-L3-15B-v2-64k.Q4_K_M.gguf": ["mradermacher/Moonlight-L3-15B-v2-64k-GGUF", MessagesFormatterType.LLAMA_3],
+    "mistral-7b-ppo-hermes-v0.3.Q5_K_M.gguf": ["mradermacher/mistral-7b-ppo-hermes-v0.3-GGUF", MessagesFormatterType.MISTRAL],
+    "Gemma-2-9B-ArliAI-RPMax-v1.1.Q4_K_M.gguf": ["mradermacher/Gemma-2-9B-ArliAI-RPMax-v1.1-GGUF", MessagesFormatterType.ALPACA],
+    "Mistral-Nemo-12B-ArliAI-RPMax-v1.1.Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-12B-ArliAI-RPMax-v1.1-GGUF", MessagesFormatterType.MISTRAL],
+    "Odin-9B.i1-Q4_K_M.gguf": ["mradermacher/Odin-9B-i1-GGUF", MessagesFormatterType.ALPACA],
+    "writing-roleplay-20k-context-nemo-12b-v1.0-q4_k_m.gguf": ["openerotica/writing-roleplay-20k-context-nemo-12b-v1.0-gguf", MessagesFormatterType.CHATML],
+    "llama-3.2-3b-titanfusion-v2-q4_k_m.gguf": ["bunnycore/Llama-3.2-3B-TitanFusion-v2-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3],
+    "gemma-2-9b-it-function-calling-q4_k_m.gguf": ["NeuroWhAI/gemma-2-9b-it-function-calling-Q4_K_M-GGUF", MessagesFormatterType.ALPACA],
+    "mt-gemma-2-9b-q6_k.gguf": ["zelk12/MT-gemma-2-9B-Q6_K-GGUF", MessagesFormatterType.ALPACA],
+    "Halu-8B-Llama3-Blackroot-Q5_K_M.gguf": ["bartowski/Halu-8B-Llama3-Blackroot-GGUF", MessagesFormatterType.LLAMA_3],
+    "OpenO1-Qwen-7B-v0.1.i1-Q5_K_M.gguf": ["mradermacher/OpenO1-Qwen-7B-v0.1-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "rombos_Llama3-8B-Instruct-Replete-Adapted.i1-Q5_K_M.gguf": ["mradermacher/rombos_Llama3-8B-Instruct-Replete-Adapted-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Llama-3.1-8B-MagPie-Ultra.i1-Q5_K_M.gguf": ["mradermacher/Llama-3.1-8B-MagPie-Ultra-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "EdgeRunner-Command-Nested.Q5_K_M.gguf": ["mradermacher/EdgeRunner-Command-Nested-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "CursorCore-QW2.5-7B.i1-Q5_K_M.gguf": ["mradermacher/CursorCore-QW2.5-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Odins-Allseeing-Eye-9B.i1-Q4_K_M.gguf": ["mradermacher/Odins-Allseeing-Eye-9B-i1-GGUF", MessagesFormatterType.ALPACA],
+    "MN-GRAND-Gutenberg-Lyra4-Lyra-12B-MADNESS.i1-Q4_K_M.gguf": ["mradermacher/MN-GRAND-Gutenberg-Lyra4-Lyra-12B-MADNESS-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Zinakha-12b.Q4_K_M.gguf": ["mradermacher/Zinakha-12b-GGUF", MessagesFormatterType.MISTRAL],
     "Gemma-2-Ataraxy-v3-Advanced-9B.i1-Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v3-Advanced-9B-i1-GGUF", MessagesFormatterType.ALPACA],
     "MFANN-llama3.1-abliterated-SLERP-v3.1.Q5_K_M.gguf": ["mradermacher/MFANN-llama3.1-abliterated-SLERP-v3.1-GGUF", MessagesFormatterType.LLAMA_3],
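For context, every entry in llm_models maps a GGUF filename to a two-element list: the Hugging Face repository that hosts the file and the MessagesFormatterType used to build the chat prompt. Below is a minimal sketch of how one such entry could be consumed with huggingface_hub, llama-cpp-python, and llama-cpp-agent; the chosen filename, the n_ctx/n_gpu_layers values, and the agent wiring are illustrative assumptions, not the Space's actual loading code.

    # Illustrative sketch (not the Space's actual code): download one GGUF listed
    # in llm_models and run it with llama-cpp-python plus llama-cpp-agent.
    from huggingface_hub import hf_hub_download
    from llama_cpp import Llama
    from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
    from llama_cpp_agent.providers import LlamaCppPythonProvider

    llm_models = {
        # One entry copied from the diff above: filename -> [repo_id, prompt formatter]
        "Zinakha-12b.Q4_K_M.gguf": ["mradermacher/Zinakha-12b-GGUF", MessagesFormatterType.MISTRAL],
    }

    filename = "Zinakha-12b.Q4_K_M.gguf"
    repo_id, formatter = llm_models[filename]

    # Fetch the quantized weights from the Hub (cached locally after the first call).
    model_path = hf_hub_download(repo_id=repo_id, filename=filename)

    # Load the GGUF and wrap it for llama-cpp-agent; the formatter selects the
    # prompt template (MISTRAL, CHATML, ALPACA, LLAMA_3, OPEN_CHAT, ...).
    llama = Llama(model_path=model_path, n_ctx=4096, n_gpu_layers=-1)
    agent = LlamaCppAgent(
        LlamaCppPythonProvider(llama),
        system_prompt="You are a helpful assistant.",
        predefined_messages_formatter_type=formatter,
    )
    print(agent.get_chat_response("Hello!"))

Any other filename from the dict can be substituted; what varies across the added entries is mainly the formatter enum, which should match the prompt format the underlying fine-tune was trained with.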