coraKong committed
Commit 6817ce7
1 Parent(s): 41d3b6d

stream output

Files changed (1)
  1. app.py +47 -15
app.py CHANGED
@@ -113,16 +113,6 @@ def on_click_send_btn(
     try:
         completion = openai.ChatCompletion.create(**props)
         print('')
-        print(completion.choices)
-        the_response_role = completion.choices[0].message.role
-        the_response = completion.choices[0].message.content
-        print(the_response)
-        print('')
-        chat_last_resp = json.dumps(completion.__dict__)
-        chat_last_resp_dict = json.loads(chat_last_resp)
-        chat_last_resp_dict['api_key'] = "hidden by UI"
-        chat_last_resp_dict['organization'] = "hidden by UI"
-        chat_last_resp = json.dumps(chat_last_resp_dict)
 
         chat_log_md = ''
         if chat_use_prompt:
@@ -137,12 +127,54 @@ def on_click_send_btn(
         if chat_input and chat_input!="":
             chat_log.append([(chat_input_role or 'user'), chat_input])
             chat_log_md += f"##### `{(chat_input_role or 'user')}`\n\n{chat_input}\n\n"
-        chat_log.append([the_response_role, the_response])
-        chat_log_md += f"##### `{the_response_role}`\n\n{the_response}\n\n"
-
-        return json.dumps(new_state), chat_log, chat_log_md, chat_log_md, chat_last_resp, props_json, ''
+
+        partial_words = ""
+        counter=0
+
+        if stream:
+            the_response = ''
+            the_response_role = ''
+            for chunk in completion:
+                #Skipping first chunk
+                if counter == 0:
+                    the_response_role = chunk.choices[0].delta.role
+                    chat_log_md += f"##### `{the_response_role}`\n\n"
+                    counter += 1
+                    continue
+                # print(('chunk', chunk))
+                if chunk.choices[0].finish_reason is None:
+                    the_response_chunk = chunk.choices[0].delta.content
+                    the_response += the_response_chunk
+                    chat_log_md += f"{the_response_chunk}"
+                    yield json.dumps(new_state), chat_log, chat_log_md, chat_log_md, "{}", props_json, ''
+                else:
+                    chat_log.append([the_response_role, the_response])
+                    chat_log_md += f"\n\n"
+                    yield json.dumps(new_state), chat_log, chat_log_md, chat_log_md, '{"msg": "stream模式不支持显示"}', props_json, ''
+            # chat_last_resp = json.dumps(completion.__dict__)
+            # chat_last_resp_dict = json.loads(chat_last_resp)
+            # chat_last_resp_dict['api_key'] = "hidden by UI"
+            # chat_last_resp_dict['organization'] = "hidden by UI"
+            # chat_last_resp = json.dumps(chat_last_resp_dict)
+        else:
+            the_response_role = completion.choices[0].message.role
+            the_response = completion.choices[0].message.content
+            print(the_response)
+            print('')
+
+            chat_log.append([the_response_role, the_response])
+            chat_log_md += f"##### `{the_response_role}`\n\n{the_response}\n\n"
+
+            chat_last_resp = json.dumps(completion.__dict__)
+            chat_last_resp_dict = json.loads(chat_last_resp)
+            chat_last_resp_dict['api_key'] = "hidden by UI"
+            chat_last_resp_dict['organization'] = "hidden by UI"
+            chat_last_resp = json.dumps(chat_last_resp_dict)
+
+            return json.dumps(new_state), chat_log, chat_log_md, chat_log_md, chat_last_resp, props_json, ''
     except Exception as error:
         print(error)
+        print('error!!!!!!')
 
         chat_log_md = ''
         if chat_use_prompt:
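For reference, the chunk format the new loop consumes comes from the legacy openai-python 0.x streaming API: with stream=True, openai.ChatCompletion.create returns a generator whose first delta carries the role and whose later deltas carry incremental content (the Chinese message in the final yield roughly means "not displayed in stream mode"). Below is a minimal, self-contained sketch of that consumption pattern; the helper name stream_reply and the model/prompt values are illustrative and not taken from app.py.

# Sketch (not part of the commit): consuming a chunk stream from the
# legacy openai-python 0.x SDK. The first delta usually carries only the
# role; later deltas carry incremental content.
import openai

def stream_reply(messages, model="gpt-3.5-turbo"):
    completion = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        stream=True,  # returns a generator of chunks instead of a single response
    )
    role, text = None, ""
    for chunk in completion:
        choice = chunk.choices[0]
        delta = choice.delta
        if "role" in delta:        # first chunk: role only
            role = delta["role"]
        if "content" in delta:     # subsequent chunks: incremental text
            text += delta["content"]
            yield role, text       # partial response so far
        if choice.finish_reason is not None:
            break                  # stream finished

# usage (assumes openai.api_key is set):
# for role, partial in stream_reply([{"role": "user", "content": "Hi"}]):
#     print(partial)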
@@ -429,7 +461,7 @@ with gradio.Blocks(title="ChatGPT", css=css) as demo:
         chat_temperature = gradio.Slider(label="temperature", value=1, minimum=0, maximum=2)
         chat_top_p = gradio.Slider(label="top_p", value=1, minimum=0, maximum=1)
         chat_choices_num = gradio.Slider(label="choices num(n)", value=1, minimum=1, maximum=20)
-        chat_stream = gradio.Checkbox(label="stream", value=False, visible=False)
+        chat_stream = gradio.Checkbox(label="stream", value=True, visible=True)
         chat_max_tokens = gradio.Slider(label="max_tokens", value=-1, minimum=-1, maximum=4096)
         chat_presence_penalty = gradio.Slider(label="presence_penalty", value=0, minimum=-2, maximum=2)
         chat_frequency_penalty = gradio.Slider(label="frequency_penalty", value=0, minimum=-2, maximum=2)
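Making the stream checkbox visible and default-on matters because on_click_send_btn now yields instead of only returning: Gradio treats generator callbacks as streaming functions and pushes each yielded tuple to the output components (with queuing enabled). A minimal sketch of that pattern, using a hypothetical echo_stream handler rather than this Space's actual layout:

# Sketch (not part of the commit): Gradio streams every value a generator
# callback yields to its output components. Names below are illustrative.
import time
import gradio

def echo_stream(message):
    shown = ""
    for ch in message:
        shown += ch
        time.sleep(0.05)  # stand-in for tokens arriving from the API
        yield shown       # each yield refreshes the Markdown output

with gradio.Blocks() as demo:
    box = gradio.Textbox(label="input")
    out = gradio.Markdown()
    btn = gradio.Button("send")
    btn.click(echo_stream, inputs=box, outputs=out)

# demo.queue().launch()  # queue() is generally required for generator callbacks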
 