Artin2009 committed on
Commit
3fc87fd
1 Parent(s): 5878e59

Update chain_app.py

Files changed (1)
  1. chain_app.py +29 -0
chain_app.py CHANGED
@@ -642,6 +642,35 @@ async def main(message: cl.Message):
             content=complete_message,
         ).send()
 
+    elif chat_profile == 'Llama-3.1-70B':
+        completion = groq_client.chat.completions.create(
+            model="llama-3.1-70b-versatile",
+            messages=[
+                {
+                    "role": "user",
+                    "content": message.content
+                }
+            ],
+            temperature=1,
+            max_tokens=1024,
+            top_p=1,
+            stream=True,
+            stop=None,
+        )
+
+        complete_content = ""
+
+        # Iterate over each chunk
+        for chunk in completion:
+            # Retrieve the content from the current chunk
+            content = chunk.choices[0].delta.content
+
+            # Check if the content is not None before concatenating it
+            if content is not None:
+                complete_content += content
+
+        # Send the concatenated content as a message
+        await cl.Message(content=complete_content).send()
 
     elif chat_profile == 'Llama-3-70B':
         completion = groq_client.chat.completions.create(
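For reference, below is a minimal standalone sketch of the streaming pattern this commit adds, outside the Chainlit handler. It assumes the groq package is installed and a GROQ_API_KEY environment variable is set; the prompt string is only illustrative, and the model name mirrors the one used in the diff.

# Minimal sketch (assumptions: groq installed, GROQ_API_KEY set, prompt is a placeholder).
import os
from groq import Groq

groq_client = Groq(api_key=os.environ["GROQ_API_KEY"])

# Request a streamed completion, mirroring the parameters used in the new branch.
completion = groq_client.chat.completions.create(
    model="llama-3.1-70b-versatile",
    messages=[{"role": "user", "content": "Summarise this commit's change in one sentence."}],
    temperature=1,
    max_tokens=1024,
    top_p=1,
    stream=True,
    stop=None,
)

# Accumulate the streamed deltas, skipping None chunks, then emit the full text once,
# just as the Llama-3.1-70B branch does before sending a single Chainlit message.
complete_content = ""
for chunk in completion:
    content = chunk.choices[0].delta.content
    if content is not None:
        complete_content += content

print(complete_content)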