Update chain_app.py
chain_app.py  CHANGED  (+2 -1)
@@ -10,6 +10,7 @@ from huggingface_hub import InferenceClient
 
 
 hf_token = os.environ.get("HF_TOKEN")
+hf_token_llama_3_1 = os.environ.get('HF_TOKEN_FOR_31')
 openai_api_key = os.environ.get('OPENAI_API_KEY')
 groq_api_key = os.environ.get('GROQ_API_KEY')
 cohere_api_key = os.environ.get('COHERE_API_KEY')
@@ -628,7 +629,7 @@ async def main(message: cl.Message):
     elif chat_profile == 'Llama-3.1-405B':
         client = InferenceClient(
            "meta-llama/Meta-Llama-3.1-405B-Instruct",
-            token=
+            token=f'{hf_token_llama_3_1}',
        )
 
        for message in client.chat_completion(
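
For context, a minimal standalone sketch of what this change does: the removed line left token= with no value, and the commit binds it to a new HF_TOKEN_FOR_31 secret read from the Space's environment and passed to InferenceClient for the Llama-3.1-405B profile. The message list and generation parameters below are illustrative assumptions, not taken from chain_app.py.

import os
from huggingface_hub import InferenceClient

# Dedicated token for the Llama 3.1 endpoint, as introduced in this commit.
hf_token_llama_3_1 = os.environ.get('HF_TOKEN_FOR_31')

client = InferenceClient(
    "meta-llama/Meta-Llama-3.1-405B-Instruct",
    token=f'{hf_token_llama_3_1}',
)

# Stream a chat completion; the prompt and max_tokens are placeholder values.
for chunk in client.chat_completion(
    messages=[{"role": "user", "content": "Hello!"}],
    max_tokens=256,
    stream=True,
):
    print(chunk.choices[0].delta.content or "", end="")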