Upload 3 files
- llmdolphin.py +2 -0
- requirements.txt +2 -2
llmdolphin.py
CHANGED
@@ -25,6 +25,7 @@ llm_models = {
     "MN-12B-Starcannon-v3.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v3-i1-GGUF", MessagesFormatterType.CHATML],
     "MN-12B-Starcannon-v4-unofficial.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v4-unofficial-i1-GGUF", MessagesFormatterType.MISTRAL],
     "MN-12B-Starsong-v1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starsong-v1-i1-GGUF", MessagesFormatterType.CHATML],
+    "StarDust-12b-v1-Q4_K_M.gguf": ["Luni/StarDust-12b-v1-GGUF", MessagesFormatterType.MISTRAL],
     "Lumimaid-Magnum-12B.i1-Q4_K_M.gguf": ["mradermacher/Lumimaid-Magnum-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Nemo-12B-Marlin-v1.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v1-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Nemo-12B-Marlin-v2.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v2-i1-GGUF", MessagesFormatterType.MISTRAL],
@@ -45,6 +46,7 @@ llm_models = {
     "MagnumChronos.Q4_K_M.gguf": ["mradermacher/MagnumChronos-GGUF", MessagesFormatterType.CHATML],
     "Trinas_Nectar-8B-model_stock.i1-Q4_K_M.gguf": ["mradermacher/Trinas_Nectar-8B-model_stock-i1-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_v1.3.1.Q4_K_M.gguf": ["mradermacher/ChatWaifu_v1.3.1-GGUF", MessagesFormatterType.MISTRAL],
+    "Fireball-12B-v1.13a-philosophers.Q4_K_M.gguf": ["mradermacher/Fireball-12B-v1.13a-philosophers-GGUF", MessagesFormatterType.MISTRAL],
     "l3.1-niitorm-8b-q8_0.gguf": ["v000000/L3.1-Niitorm-8B-t0.0001-Q8_0-GGUF", MessagesFormatterType.MISTRAL],
     "mn-12b-lyra-v3-q5_k_m.gguf": ["HalleyStarbun/MN-12B-Lyra-v3-Q5_K_M-GGUF", MessagesFormatterType.CHATML],
     "mergekit-model_stock-ffibbcs-q4_k_s.gguf": ["DarwinAnim8or/mergekit-model_stock-ffibbcs-Q4_K_S-GGUF", MessagesFormatterType.ALPACA],
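For context, each llm_models entry maps a local GGUF filename to a Hugging Face repo ID and the chat formatter that llama-cpp-agent should apply to that model. The sketch below is illustrative only and not part of this commit; it assumes the Space resolves an entry by downloading the file with huggingface_hub and loading it with llama-cpp-python, and the load_model helper name is hypothetical.

# Minimal sketch (not from this commit): resolving one llm_models entry into a
# local GGUF file and a llama-cpp-python model. Repo IDs, filenames, and
# formatter values come from the dict above; load_model is a hypothetical helper.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import MessagesFormatterType

llm_models = {
    "StarDust-12b-v1-Q4_K_M.gguf": ["Luni/StarDust-12b-v1-GGUF", MessagesFormatterType.MISTRAL],
    "Fireball-12B-v1.13a-philosophers.Q4_K_M.gguf": ["mradermacher/Fireball-12B-v1.13a-philosophers-GGUF", MessagesFormatterType.MISTRAL],
}

def load_model(filename: str):
    repo_id, formatter = llm_models[filename]
    # Download the quantized GGUF weights from the Hugging Face Hub (cached locally).
    model_path = hf_hub_download(repo_id=repo_id, filename=filename)
    # The formatter tells llama-cpp-agent which chat template to use later.
    return Llama(model_path=model_path, n_ctx=4096), formatter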
requirements.txt
CHANGED
@@ -1,6 +1,7 @@
+spaces
 huggingface_hub
 scikit-build-core
-https://github.com/abetlen/llama-cpp-python/releases/download/v0.2.
+https://github.com/abetlen/llama-cpp-python/releases/download/v0.2.90-cu124/llama_cpp_python-0.2.90-cp310-cp310-linux_x86_64.whl
 git+https://github.com/Maximilian-Winter/llama-cpp-agent
 pybind11>=2.12
 torch==2.2.0
@@ -8,7 +9,6 @@ torchvision
 accelerate
 transformers
 optimum[onnxruntime]
-spaces
 dartrs
 httpx==0.13.3
 httpcore
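As the wheel filename indicates, the requirements change pins a prebuilt llama_cpp_python 0.2.90 wheel for CUDA 12.4 and CPython 3.10 on linux x86_64, and moves spaces to the top of the file. A small, hypothetical sanity check (not part of this commit) for verifying that the pinned wheel is what actually got installed:

import llama_cpp

# Illustrative check only: the pinned wheel in requirements.txt should
# install llama-cpp-python 0.2.90.
assert llama_cpp.__version__ == "0.2.90", llama_cpp.__version__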