Commit: 动态endpoint (dynamic endpoint)
Changed files:
- crazy_functions/询问多个大语言模型.py  +1 -1
- request_llm/bridge_chatgpt.py  +6 -2
- toolbox.py  +0 -1
crazy_functions/询问多个大语言模型.py
CHANGED
@@ -16,7 +16,7 @@ def 同时问询(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt
     chatbot.append((txt, "正在同时咨询ChatGPT和ChatGLM……"))
     yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 由于请求gpt需要一段时间,我们先及时地做一次界面更新
 
-    llm_kwargs['llm_model'] = '
+    llm_kwargs['llm_model'] = 'gpt-3.5-turbo&api2d-gpt-3.5-turbo'
     gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive(
         inputs=txt, inputs_show_user=txt,
         llm_kwargs=llm_kwargs, chatbot=chatbot, history=history,
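For reference, this plugin asks several backends at once by packing their names into one '&'-separated string in llm_kwargs['llm_model']; the dispatcher in request_llm/bridge_all.py is then presumably responsible for splitting that string and querying each model. The sketch below only illustrates that idea under that assumption, with placeholder names rather than code from the repo:

from concurrent.futures import ThreadPoolExecutor

def ask_single_model(model, inputs):
    # Placeholder for the real per-model bridge call.
    return f"[{model}] reply to: {inputs}"

def ask_many(llm_model_string, inputs):
    models = llm_model_string.split('&')   # e.g. ['gpt-3.5-turbo', 'api2d-gpt-3.5-turbo']
    with ThreadPoolExecutor(max_workers=len(models)) as pool:
        replies = list(pool.map(lambda m: ask_single_model(m, inputs), models))
    return '\n\n'.join(replies)             # merge the answers for display

print(ask_many('gpt-3.5-turbo&api2d-gpt-3.5-turbo', 'Hello'))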
request_llm/bridge_chatgpt.py
CHANGED
@@ -60,7 +60,9 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
     while True:
         try:
             # make a POST request to the API endpoint, stream=False
-
+            from .bridge_all import model_info
+            endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
+            response = requests.post(endpoint, headers=headers, proxies=proxies,
                 json=payload, stream=True, timeout=TIMEOUT_SECONDS); break
         except requests.exceptions.ReadTimeout as e:
             retry += 1
@@ -148,7 +150,9 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     while True:
         try:
             # make a POST request to the API endpoint, stream=True
-
+            from .bridge_all import model_info
+            endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
+            response = requests.post(endpoint, headers=headers, proxies=proxies,
                 json=payload, stream=True, timeout=TIMEOUT_SECONDS);break
         except:
             retry += 1
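This is the core of the commit: instead of posting to one fixed URL, both request paths now resolve the endpoint at request time from model_info, keyed by whichever model is currently in llm_kwargs['llm_model'], so each sub-request of a multi-model query reaches its own backend. A minimal sketch of that lookup, assuming model_info maps model names to endpoint URLs (the registry entries and the helper below are illustrative, not taken from the repo):

import requests

model_info = {
    'gpt-3.5-turbo':       {'endpoint': 'https://api.openai.com/v1/chat/completions'},
    'api2d-gpt-3.5-turbo': {'endpoint': 'https://openai.api2d.net/v1/chat/completions'},  # illustrative URL
}

def post_chat(llm_kwargs, headers, payload, proxies=None, timeout=30):
    # Pick the endpoint for the model actually being queried.
    endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
    return requests.post(endpoint, headers=headers, proxies=proxies,
                         json=payload, stream=True, timeout=timeout)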
toolbox.py
CHANGED
@@ -36,7 +36,6 @@ def ArgsGeneralWrapper(f):
         llm_kwargs = {
             'api_key': cookies['api_key'],
             'llm_model': llm_model,
-            'endpoint': model_info[llm_model]['endpoint'],
             'top_p':top_p,
             'max_length': max_length,
             'temperature':temperature,
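With the endpoint now resolved inside bridge_chatgpt.py at request time, a single static 'endpoint' entry in llm_kwargs is redundant, and it would be wrong whenever llm_model carries several '&'-joined models sharing one llm_kwargs dict; dropping it here keeps the wrapper model-agnostic.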