Update app.py
Browse files
app.py
CHANGED
@@ -2,10 +2,10 @@ import gradio as gr
|
|
2 |
from gpt4all import GPT4All
|
3 |
from huggingface_hub import hf_hub_download
|
4 |
|
5 |
-
title = "Apollo-
|
6 |
|
7 |
description = """
|
8 |
-
π [Apollo-
|
9 |
|
10 |
🚨 Running on CPU-Basic free hardware. Suggest duplicating this space to run without a queue.
|
11 |
|
@@ -13,13 +13,12 @@ Mistral does not support system prompt symbol (such as ```<<SYS>>```) now, input
|
|
13 |
"""
|
14 |
|
15 |
"""
|
16 |
-
[Model From
|
17 |
-
[Mistral-instruct-v0.1 System prompt](https://docs.mistral.ai/usage/guardrailing)
|
18 |
"""
|
19 |
|
20 |
model_path = "models"
|
21 |
-
model_name = "Apollo-
|
22 |
-
hf_hub_download(repo_id="FreedomIntelligence/Apollo-
|
23 |
|
24 |
print("Start the model init process")
|
25 |
# Load the local GGUF model on CPU. The original line had a duplicated
# assignment target ("model = model = ..."), which is redundant; a single
# binding is sufficient. allow_download=False because the weights are
# fetched explicitly with hf_hub_download earlier in the script.
model = GPT4All(model_name, model_path, allow_download=False, device="cpu")
|
|
|
2 |
from gpt4all import GPT4All
|
3 |
from huggingface_hub import hf_hub_download
|
4 |
|
5 |
+
title = "Apollo-6B-GGUF Run On CPU"
|
6 |
|
7 |
description = """
|
8 |
+
🔎 [Apollo-6B](https://huggingface.co/FreedomIntelligence/Apollo-6B) [GGUF format model](https://huggingface.co/FreedomIntelligence/Apollo-6B-GGUF) , 8-bit quantization balanced quality gguf version, running on CPU. Using [GitHub - llama.cpp](https://github.com/ggerganov/llama.cpp) [GitHub - gpt4all](https://github.com/nomic-ai/gpt4all).
|
9 |
|
10 |
🚨 Running on CPU-Basic free hardware. Suggest duplicating this space to run without a queue.
|
11 |
|
|
|
13 |
"""
|
14 |
|
15 |
"""
|
16 |
+
[Model From FreedomIntelligence/Apollo-6B-GGUF](https://huggingface.co/FreedomIntelligence/Apollo-6B-GGUF)
|
|
|
17 |
"""
|
18 |
|
19 |
model_path = "models"
|
20 |
+
model_name = "Apollo-6B-q8_0.gguf"
|
21 |
+
# Download the 8-bit quantized GGUF weights into the local "models" dir at
# startup; local_dir_use_symlinks=False makes the real file land in
# local_dir (not a cache symlink), so GPT4All can open it by path.
hf_hub_download(repo_id="FreedomIntelligence/Apollo-6B-GGUF", filename=model_name, local_dir=model_path, local_dir_use_symlinks=False)
|
22 |
|
23 |
print("Start the model init process")
|
24 |
# Load the local GGUF model on CPU. The original line had a duplicated
# assignment target ("model = model = ..."), which is redundant; a single
# binding is sufficient. allow_download=False because the weights are
# fetched explicitly with hf_hub_download earlier in the script.
model = GPT4All(model_name, model_path, allow_download=False, device="cpu")
|