Add support for Golang, Java and other projects (添加Golang、Java等项目的支持)

Files changed:
- README.md +5 -7
- crazy_functions/下载arxiv论文翻译摘要.py +3 -4
- crazy_functions/解析项目源代码.py +32 -43
- functional_crazy.py +33 -27
- main.py +3 -4
- predict.py +2 -9
README.md
CHANGED
@@ -36,16 +36,14 @@ https://github.com/polarwinkel/mdtex2html
 自定义快捷键 | 支持自定义快捷键
 配置代理服务器 | 支持配置代理服务器
 模块化设计 | 支持自定义高阶的实验性功能
-自我程序剖析 | [
-程序剖析 | [
-读论文 | [
-
-
-chat分析报告生成 | [函数插件] 运行后自动生成总结汇报
+自我程序剖析 | [实验性功能] 一键读懂本项目的源代码
+程序剖析 | [实验性功能] 一键可以剖析其他Python/C++项目
+读论文 | [实验性功能] 一键解读latex论文全文并生成摘要
+批量注释生成 | [实验性功能] 一键批量生成函数注释
+chat分析报告生成 | [实验性功能] 运行后自动生成总结汇报
 公式显示 | 可以同时显示公式的tex形式和渲染形式
 图片显示 | 可以在markdown中显示图片
 支持GPT输出的markdown表格 | 可以输出支持GPT的markdown表格
-本地大语言模型接口 | 借助[TGUI](https://github.com/oobabooga/text-generation-webui)接入galactica等本地语言模型
 …… | ……

 </div>
crazy_functions/下载arxiv论文翻译摘要.py
CHANGED
@@ -134,8 +134,7 @@ def get_name(_url_):
 @CatchException
 def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):

-    CRAZY_FUNCTION_INFO = "下载arxiv
-    raise RuntimeError()
+    CRAZY_FUNCTION_INFO = "下载arxiv论文并翻译摘要,函数插件作者[binary-husky]。正在提取摘要并下载PDF文档……"
     import glob
     import os

@@ -180,8 +179,8 @@ def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history,
     # 写入文件
     import shutil
     # 重置文件的创建时间
-    shutil.copyfile(pdf_path,
+    shutil.copyfile(pdf_path, f'./gpt_log/{os.path.basename(pdf_path)}'); os.remove(pdf_path)
     res = write_results_to_file(history)
-    chatbot.append(("完成了吗?", res))
+    chatbot.append(("完成了吗?", res + "\n\nPDF文件也已经下载"))
     yield chatbot, history, msg
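The last hunk replaces a truncated shutil.copyfile call with a copy-then-delete that drops the downloaded PDF into ./gpt_log/ before the report link is appended to the chat. A minimal standalone sketch of that relocation step (the helper name move_to_gpt_log is illustrative, not part of the project):

```python
import os
import shutil

def move_to_gpt_log(pdf_path, log_dir='./gpt_log'):
    """Copy a freshly downloaded PDF into the log directory, then remove the
    original -- the same copy-and-delete one-liner added in the hunk above."""
    os.makedirs(log_dir, exist_ok=True)                  # make sure ./gpt_log exists
    target = os.path.join(log_dir, os.path.basename(pdf_path))
    shutil.copyfile(pdf_path, target)                    # copying also resets the file's creation time
    os.remove(pdf_path)                                  # drop the original download
    return target
```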
crazy_functions/解析项目源代码.py
CHANGED
@@ -1,9 +1,7 @@
 from predict import predict_no_ui
 from toolbox import CatchException, report_execption, write_results_to_file, predict_no_ui_but_counting_down
-
 fast_debug = False

-
 def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt):
     import time, glob, os
     print('begin analysis on:', file_manifest)
@@ -11,22 +9,20 @@ def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot,
         with open(fp, 'r', encoding='utf-8') as f:
             file_content = f.read()

-        prefix = "接下来请你逐文件分析下面的工程" if index
+        prefix = "接下来请你逐文件分析下面的工程" if index==0 else ""
         i_say = prefix + f'请对下面的程序文件做一个概述文件名是{os.path.relpath(fp, project_folder)},文件代码是 ```{file_content}```'
         i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的程序文件做一个概述: {os.path.abspath(fp)}'
         chatbot.append((i_say_show_user, "[Local Message] waiting gpt response."))
         yield chatbot, history, '正常'

-        if not fast_debug:
+        if not fast_debug:
             msg = '正常'

             # ** gpt request **
-            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature,
-                history=[]) # 带超时倒计时
+            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时

             chatbot[-1] = (i_say_show_user, gpt_say)
-            history.append(i_say_show_user);
-            history.append(gpt_say)
+            history.append(i_say_show_user); history.append(gpt_say)
             yield chatbot, history, msg
             if not fast_debug: time.sleep(2)

@@ -35,48 +31,45 @@ def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot,
     chatbot.append((i_say, "[Local Message] waiting gpt response."))
     yield chatbot, history, '正常'

-    if not fast_debug:
+    if not fast_debug:
         msg = '正常'
         # ** gpt request **
-        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature,
-
-
+        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history) # 带超时倒计时
+
         chatbot[-1] = (i_say, gpt_say)
-        history.append(i_say);
-        history.append(gpt_say)
+        history.append(i_say); history.append(gpt_say)
         yield chatbot, history, msg
         res = write_results_to_file(history)
         chatbot.append(("完成了吗?", res))
         yield chatbot, history, msg


+
+
 @CatchException
 def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
-    history = []
+    history = [] # 清空历史,以免输入溢出
     import time, glob, os
     file_manifest = [f for f in glob.glob('./*.py') if ('test_project' not in f) and ('gpt_log' not in f)] + \
-                    [f for f in glob.glob('./crazy_functions/*.py') if
-                    ('test_project' not in f) and ('gpt_log' not in f)]
+                    [f for f in glob.glob('./crazy_functions/*.py') if ('test_project' not in f) and ('gpt_log' not in f)]
     for index, fp in enumerate(file_manifest):
         # if 'test_project' in fp: continue
         with open(fp, 'r', encoding='utf-8') as f:
             file_content = f.read()

-        prefix = "接下来请你分析自己的程序构成,别紧张," if index
+        prefix = "接下来请你分析自己的程序构成,别紧张," if index==0 else ""
         i_say = prefix + f'请对下面的程序文件做一个概述文件名是{fp},文件代码是 ```{file_content}```'
         i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的程序文件做一个概述: {os.path.abspath(fp)}'
         chatbot.append((i_say_show_user, "[Local Message] waiting gpt response."))
         yield chatbot, history, '正常'

-        if not fast_debug:
+        if not fast_debug:
             # ** gpt request **
             # gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature)
-            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature,
-                history=[], long_connection=True) # 带超时倒计时
+            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[], long_connection=True) # 带超时倒计时

             chatbot[-1] = (i_say_show_user, gpt_say)
-            history.append(i_say_show_user);
-            history.append(gpt_say)
+            history.append(i_say_show_user); history.append(gpt_say)
             yield chatbot, history, '正常'
             time.sleep(2)

@@ -84,35 +77,32 @@ def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTx
     chatbot.append((i_say, "[Local Message] waiting gpt response."))
     yield chatbot, history, '正常'

-    if not fast_debug:
+    if not fast_debug:
         # ** gpt request **
         # gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature, history=history)
-        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history,
-            long_connection=True) # 带超时倒计时
+        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history, long_connection=True) # 带超时倒计时

         chatbot[-1] = (i_say, gpt_say)
-        history.append(i_say);
-        history.append(gpt_say)
+        history.append(i_say); history.append(gpt_say)
         yield chatbot, history, '正常'
         res = write_results_to_file(history)
         chatbot.append(("完成了吗?", res))
         yield chatbot, history, '正常'

-
 @CatchException
 def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
-    history = []
+    history = [] # 清空历史,以免输入溢出
     import glob, os
     if os.path.exists(txt):
         project_folder = txt
     else:
         if txt == "": txt = '空空如也的输入栏'
-        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
         yield chatbot, history, '正常'
         return
     file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.py', recursive=True)]
     if len(file_manifest) == 0:
-        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何python文件: {txt}")
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何python文件: {txt}")
         yield chatbot, history, '正常'
         return
     yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)
@@ -120,41 +110,40 @@ def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPr

 @CatchException
 def 解析一个C项目的头文件(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
-    history = []
+    history = [] # 清空历史,以免输入溢出
     import glob, os
     if os.path.exists(txt):
         project_folder = txt
     else:
         if txt == "": txt = '空空如也的输入栏'
-        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
         yield chatbot, history, '正常'
         return
-    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)]
-
-
+    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] # + \
+                    # [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
+                    # [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
     if len(file_manifest) == 0:
-        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.h头文件: {txt}")
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}")
         yield chatbot, history, '正常'
         return
     yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)

-
 @CatchException
 def 解析一个C项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
-    history = []
+    history = [] # 清空历史,以免输入溢出
     import glob, os
     if os.path.exists(txt):
         project_folder = txt
     else:
         if txt == "": txt = '空空如也的输入栏'
-        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
         yield chatbot, history, '正常'
         return
-    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)]
+    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] + \
                     [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
                     [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
     if len(file_manifest) == 0:
-        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.h头文件: {txt}")
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}")
         yield chatbot, history, '正常'
         return
     yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)
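The commit title promises support for Golang and Java projects, and this file is where the per-language entry points (解析一个Python项目, 解析一个C项目, …) live, but the hunks shown here only clean up the existing ones. A hedged sketch of what a Golang entry point could look like if it follows the same pattern as 解析一个Python项目 above; the function name 解析一个Golang项目, the *.go glob, and the error message are assumptions, not taken from this diff:

```python
# Hypothetical Golang entry point, mirroring 解析一个Python项目 above.
# Relies on the module-level imports already present in this file
# (CatchException, report_execption, 解析源代码).
@CatchException
def 解析一个Golang项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
    history = []  # 清空历史,以免输入溢出
    import glob, os
    if os.path.exists(txt):
        project_folder = txt
    else:
        if txt == "": txt = '空空如也的输入栏'
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
        yield chatbot, history, '正常'
        return
    # collect every .go source file under the project folder
    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.go', recursive=True)]
    if len(file_manifest) == 0:
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.go源文件: {txt}")
        yield chatbot, history, '正常'
        return
    yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)
```

A Java entry point would differ only in the glob pattern (*.java) and the error message.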
functional_crazy.py
CHANGED
@@ -1,13 +1,8 @@
 from toolbox import HotReload # HotReload 的意思是热更新,修改函数插件后,不需要重启程序,代码直接生效

-# UserVisibleLevel是过滤器参数。
-# 由于UI界面空间有限,所以通过这种方式决定UI界面中显示哪些插件
-# 默认函数插件 VisibleLevel 是 0
-# 当 UserVisibleLevel >= 函数插件的 VisibleLevel 时,该函数插件才会被显示出来
-UserVisibleLevel = 1
-
-
 def get_crazy_functionals():
+    ###################### 第一组插件 ###########################
+    # [第一组插件]: 最早期编写的项目插件和一些demo
     from crazy_functions.读文章写摘要 import 读文章写摘要
     from crazy_functions.生成函数注释 import 批量生成函数注释
     from crazy_functions.解析项目源代码 import 解析项目本身
@@ -70,33 +65,44 @@ def get_crazy_functionals():
             "Function": HotReload(高阶功能模板函数)
         },
     }
-
-
-
-    from crazy_functions
-    from crazy_functions.总结word文档 import 总结word文档
+    ###################### 第二组插件 ###########################
+    # [第二组插件]: 经过充分测试,但功能上距离达到完美状态还差一点点
+    from crazy_functions.批量总结PDF文档 import 批量总结PDF文档
+    from crazy_functions.批量总结PDF文档pdfminer import 批量总结PDF文档pdfminer
+    from crazy_functions.总结word文档 import 总结word文档
+    function_plugins.update({
+        "[仅供开发调试] 批量总结PDF文档": {
+            "Color": "stop",
+            "Function": HotReload(批量总结PDF文档) # HotReload 的意思是热更新,修改函数插件代码后,不需要重启程序,代码直接生效
+        },
+        "[仅供开发调试] 批量总结PDF文档pdfminer": {
+            "Color": "stop",
+            "AsButton": False, # 加入下拉菜单中
+            "Function": HotReload(批量总结PDF文档pdfminer)
+        },
+        "[仅供开发调试] 批量总结Word文档": {
+            "Color": "stop",
+            "Function": HotReload(总结word文档)
+        },
+    })

+    ###################### 第三组插件 ###########################
+    # [第三组插件]: 尚未充分测试的函数插件,放在这里
+    try:
+        from crazy_functions.下载arxiv论文翻译摘要 import 下载arxiv论文并翻译摘要
     function_plugins.update({
-        "
-            "Color": "stop",
-            "Function": HotReload(批量总结PDF文档) # HotReload 的意思是热更新,修改函数插件代码后,不需要重启程序,代码直接生效
-        },
-        "[仅供开发调试] 批量总结PDF文档pdfminer": {
+        "下载arxiv论文并翻译摘要": {
             "Color": "stop",
             "AsButton": False, # 加入下拉菜单中
-            "Function": HotReload(
-        }
-        "[仅供开发调试] 批量总结Word文档": {
-            "Color": "stop",
-            "Function": HotReload(总结word文档)
-        },
+            "Function": HotReload(下载arxiv论文并翻译摘要)
+        }
     })
+    except Exception as err:
+        print(f'[下载arxiv论文并翻译摘要] 插件导入失败 {str(err)}')
+

-    # VisibleLevel=2 尚未充分测试的函数插件,放在这里
-    if UserVisibleLevel >= 2:
-        function_plugins.update({
-        })

+    ###################### 第n组插件 ###########################
     return function_plugins
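get_crazy_functionals() is also where any new language plugin has to be registered: each entry maps a display label to a dict with a Color, an optional AsButton flag, and a HotReload-wrapped function, and less-tested plugins are guarded by try/except as in the third group above. A minimal sketch of registering a hypothetical Golang parser the same way (the plugin 解析一个Golang项目 and its label are assumed from the commit title, not shown in this diff); the snippet would sit inside get_crazy_functionals() before the return:

```python
    # Hypothetical registration of a Golang parser plugin, following the
    # try/except + HotReload pattern used for 下载arxiv论文并翻译摘要 above.
    try:
        from crazy_functions.解析项目源代码 import 解析一个Golang项目  # assumed name
        function_plugins.update({
            "解析整个Golang项目": {
                "Color": "stop",
                "AsButton": False,  # 加入下拉菜单中
                "Function": HotReload(解析一个Golang项目)
            }
        })
    except Exception as err:
        print(f'[解析一个Golang项目] 插件导入失败 {str(err)}')
```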
main.py
CHANGED
@@ -11,9 +11,8 @@ proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT =
 PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
 if not AUTHENTICATION: AUTHENTICATION = None

-title = "ChatGPT 学术优化" if LLM_MODEL.startswith('gpt') else "ChatGPT / LLM 学术优化"
 initial_prompt = "Serve me as a writing and programming assistant."
-title_html =
+title_html = """<h1 align="center">ChatGPT 学术优化</h1>"""

 # 问询记录, python 版本建议3.9+(越新越好)
 import logging
@@ -120,7 +119,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
         dropdown.select(on_dropdown_changed, [dropdown], [switchy_bt] )
         # 随变按钮的回调函数注册
         def route(k, *args, **kwargs):
-            if k in [r"打开插件列表", r"
+            if k in [r"打开插件列表", r"请先从插件列表中选择"]: return
             yield from crazy_fns[k]["Function"](*args, **kwargs)
         click_handle = switchy_bt.click(route,[switchy_bt, *input_combo, gr.State(PORT)], output_combo)
         click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])
@@ -141,5 +140,5 @@ def auto_opentab_delay():
     threading.Thread(target=open, name="open-browser", daemon=True).start()

 auto_opentab_delay()
-demo.title =
+demo.title = "ChatGPT 学术优化"
 demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", share=True, server_port=PORT, auth=AUTHENTICATION)
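The route() change only adds a guard so that clicking the flexible button does nothing while the dropdown still shows a placeholder label; every real label is dispatched to the matching plugin generator. A stripped-down sketch of that dispatch pattern (crazy_fns here is a stub standing in for the registry built from get_crazy_functionals()):

```python
# Stub registry: label -> plugin descriptor, standing in for the real crazy_fns.
def _demo_plugin(*args, **kwargs):
    # placeholder generator; real plugins yield UI updates
    yield "[Local Message] demo plugin ran"

crazy_fns = {"解析项目本身": {"Function": _demo_plugin}}

def route(k, *args, **kwargs):
    # Placeholder labels from the dropdown are ignored instead of raising KeyError.
    if k in [r"打开插件列表", r"请先从插件列表中选择"]: return
    yield from crazy_fns[k]["Function"](*args, **kwargs)
```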
predict.py
CHANGED
@@ -112,7 +112,8 @@ def predict_no_ui_long_connection(inputs, top_p, temperature, history=[], sys_pr
     return result


-def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt='',
+def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt='',
+            stream = True, additional_fn=None):
     """
     发送至chatGPT,流式获取输出。
     用于基础的对话功能。
@@ -243,11 +244,3 @@ def generate_payload(inputs, top_p, temperature, history, system_prompt, stream)
     return headers,payload


-if not LLM_MODEL.startswith('gpt'):
-    # 函数重载到另一个文件
-    from request_llm.bridge_tgui import predict_tgui, predict_tgui_no_ui
-    predict = predict_tgui
-    predict_no_ui = predict_tgui_no_ui
-    predict_no_ui_long_connection = predict_tgui_no_ui
-
-
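The first hunk restores the two-line predict() signature (with stream=True and additional_fn=None); the second deletes the module-level override that pointed predict and its variants at the TGUI bridge for non-GPT models, and this excerpt does not show where that dispatch lives afterwards. A hedged sketch of driving the streaming generator that predict() returns; the (chatbot, history, status) yield shape is an assumption based on the other generators in this commit, not confirmed here:

```python
# Hypothetical driver for the streaming generator returned by predict().
# The (chatbot, history, status) unpacking is an assumption taken from the
# other generators in this commit, not from predict() itself.
def drive_predict(predict, user_input, top_p=1.0, temperature=1.0):
    chatbot, history = [], []
    for chatbot, history, status in predict(user_input, top_p, temperature,
                                            chatbot=chatbot, history=history,
                                            system_prompt='Serve me as a writing and programming assistant.',
                                            stream=True, additional_fn=None):
        print(status)   # e.g. '正常' while tokens are still streaming in
    return chatbot, history
```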