abhi1nandy2 committed
Commit f5e2959
1 Parent(s): cee06d9

Update app.py

Files changed (1): app.py (+17 -17)
app.py CHANGED
@@ -45,28 +45,28 @@ def respond(
     messages = [{"role": "system", "content": system_message}]
 
     for val in history:
-        # if val[0]:
-        if len(val)>=1:
-            messages.append({"role": "user", "content": "Question: "+val[0]})
-        # if val[1]:
-        if len(val)>=2:
-            messages.append({"role": "assistant", "content": "Answer: "+val[1]})
+        if len(val) >= 1:
+            messages.append({"role": "user", "content": "Question: " + val[0]})
+        if len(val) >= 2:
+            messages.append({"role": "assistant", "content": "Answer: " + val[1]})
 
     messages.append({"role": "user", "content": message})
 
-    response = ""
-
-    for message in client.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
-        temperature=temperature,
-        top_p=top_p,
-    ):
-        token = message.choices[0].delta.content
-
-        response += token
-        yield response
+    try:
+        response = client.chat_completion(
+            messages,
+            max_tokens=max_tokens,
+            temperature=temperature,
+            top_p=top_p,
+            # stream=True,  # Disable streaming for debugging
+        )
+        return response.choices[0].message["content"]
+
+    except Exception as e:
+        print(f"An error occurred: {e}")
+        return "An error occurred while processing the response."
 
+
+
 """
 For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
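
Note on the change: the commit replaces the streaming generator (which accumulated delta.content tokens and yielded partial text) with a single blocking chat_completion call wrapped in try/except, commenting out stream=True "for debugging". Since gr.ChatInterface accepts both returning and yielding callbacks, the same error handling could later be combined with streaming. The sketch below is not part of this commit: the InferenceClient setup and model id are assumptions based on the standard Gradio chat template, and respond_streaming is a hypothetical name.

# Hypothetical sketch (not in this commit): streaming re-enabled while
# keeping the try/except guard. The client setup is assumed from the
# standard Gradio chat template; the model id is a placeholder.
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # placeholder model id


def respond_streaming(message, history, system_message, max_tokens, temperature, top_p):
    messages = [{"role": "system", "content": system_message}]
    for val in history:
        if len(val) >= 1:
            messages.append({"role": "user", "content": "Question: " + val[0]})
        if len(val) >= 2:
            messages.append({"role": "assistant", "content": "Answer: " + val[1]})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # chat_completion(stream=True) yields chunks; each chunk carries the next
        # token in choices[0].delta.content (possibly None on the final chunk).
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content
            if token:
                response += token
                yield response
    except Exception as e:
        print(f"An error occurred: {e}")
        yield "An error occurred while processing the response."

Guarding on "if token:" avoids concatenating None when a chunk has no delta content, and yielding the error message keeps the generator contract that gr.ChatInterface expects.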