Spaces:
Runtime error
Runtime error
Update chain_app.py
Browse files — chain_app.py (+11 additions, −27 deletions)
chain_app.py
CHANGED
@@ -7,6 +7,7 @@ from chainlit.input_widget import Select, Slider
|
|
7 |
import os
|
8 |
import cohere
|
9 |
from huggingface_hub import InferenceClient
|
|
|
10 |
|
11 |
|
12 |
hf_token = os.environ.get("HF_TOKEN")
|
@@ -2197,34 +2198,17 @@ async def main(message: cl.Message):
|
|
2197 |
# ).send()
|
2198 |
|
2199 |
elif chat_profile == 'Llama-3.1-405B':
|
2200 |
-
|
2201 |
-
|
2202 |
-
|
2203 |
-
|
2204 |
-
|
2205 |
-
|
2206 |
-
|
2207 |
-
],
|
2208 |
-
temperature=1,
|
2209 |
-
max_tokens=1024,
|
2210 |
-
top_p=1,
|
2211 |
-
stream=True,
|
2212 |
-
stop=None,
|
2213 |
)
|
2214 |
-
|
2215 |
-
|
2216 |
-
|
2217 |
-
# Iterate over each chunk
|
2218 |
-
for chunk in completion:
|
2219 |
-
# Retrieve the content from the current chunk
|
2220 |
-
content = chunk.choices[0].delta.content
|
2221 |
-
|
2222 |
-
# Check if the content is not None before concatenating it
|
2223 |
-
if content is not None:
|
2224 |
-
complete_content += content
|
2225 |
-
|
2226 |
-
# Send the concatenated content as a message
|
2227 |
-
await cl.Message(content=complete_content).send()
|
2228 |
|
2229 |
|
2230 |
elif chat_profile == 'Llama-3.1-70B':
|
|
|
7 |
import os
|
8 |
import cohere
|
9 |
from huggingface_hub import InferenceClient
|
10 |
+
from fireworks.client import Fireworks
|
11 |
|
12 |
|
13 |
hf_token = os.environ.get("HF_TOKEN")
|
|
|
2198 |
# ).send()
|
2199 |
|
2200 |
elif chat_profile == 'Llama-3.1-405B':
|
2201 |
+
client = Fireworks(api_key="<REDACTED — a plaintext API key was committed here; it must be treated as compromised, rotated immediately, and loaded from an environment variable instead>")
|
2202 |
+
response = client.chat.completions.create(
|
2203 |
+
model="accounts/fireworks/models/llama-v3p1-405b-instruct",
|
2204 |
+
messages=[{
|
2205 |
+
"role": "user",
|
2206 |
+
"content": "Say this is a test",
|
2207 |
+
}],
|
|
|
|
|
|
|
|
|
|
|
|
|
2208 |
)
|
2209 |
+
await cl.Message(
|
2210 |
+
content=response.choices[0].message.content,
|
2211 |
+
).send()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2212 |
|
2213 |
|
2214 |
elif chat_profile == 'Llama-3.1-70B':
|