gokaygokay committed
Commit 070a924 • 1 parent: 0455969
Update app.py
app.py CHANGED
@@ -14,7 +14,7 @@ llm_model = None
 
 hf_hub_download(
     repo_id="bartowski/Reflection-Llama-3.1-70B-GGUF",
-    filename="Reflection-Llama-3.1-70B-
+    filename="Reflection-Llama-3.1-70B-Q3_K_M.gguf",
     local_dir = "./models"
 )
 
@@ -109,9 +109,9 @@ demo = gr.ChatInterface(
     respond,
     additional_inputs=[
         gr.Dropdown([
-            "Reflection-Llama-3.1-70B-
+            "Reflection-Llama-3.1-70B-Q3_K_M.gguf"
         ],
-        value="Reflection-Llama-3.1-70B-
+        value="Reflection-Llama-3.1-70B-Q3_K_M.gguf",
         label="Model"
         ),
         gr.Textbox(value="You are a world-class AI system, capable of complex reasoning and reflection. Reason through the query inside <thinking> tags, and then provide your final response inside <output> tags. If you detect that you made a mistake in your reasoning at any point, correct yourself inside <reflection> tags.", label="System message"),
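
This commit swaps the quantization in both places that reference the GGUF file name, the hf_hub_download call and the Gradio model dropdown, keeping the two in sync. For context, hf_hub_download returns the local path of the downloaded file; below is a minimal sketch of how a Q3_K_M GGUF like this is typically loaded with llama-cpp-python. The Llama(...) arguments are placeholders, not values taken from this Space's app.py.

# Sketch only: loading the downloaded Q3_K_M GGUF with llama-cpp-python.
# Runtime arguments below are placeholders, not from this repository.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

model_path = hf_hub_download(
    repo_id="bartowski/Reflection-Llama-3.1-70B-GGUF",
    filename="Reflection-Llama-3.1-70B-Q3_K_M.gguf",
    local_dir="./models",
)  # returns the local path to the downloaded .gguf file

llm_model = Llama(
    model_path=model_path,
    n_gpu_layers=-1,  # placeholder: offload all layers to the GPU
    n_ctx=4096,       # placeholder context window
)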
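
On the interface side, gr.ChatInterface passes the current values of additional_inputs to the handler after (message, history), so the selected model name and the system message arrive as extra arguments to respond. The sketch below is shaped to match only the two inputs visible in this hunk; the real respond in this Space very likely accepts further inputs (sampling parameters and so on) that are outside this diff.

# Sketch only: a `respond` handler consistent with the two additional_inputs
# shown in the diff (model dropdown, system message). Not the Space's actual code.
def respond(message, history, model, system_message):
    # `model` is the dropdown selection; with a single quantization listed,
    # it is informational here.
    # Assemble an OpenAI-style message list from the system prompt and history.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Stream tokens back to the ChatInterface via llama-cpp-python's
    # OpenAI-compatible chat completion API.
    partial = ""
    for chunk in llm_model.create_chat_completion(messages=messages, stream=True):
        partial += chunk["choices"][0]["delta"].get("content", "")
        yield partial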