import os
"""
=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
接驳void-terminal:
- set_conf: 在运行过程中动态地修改配置
- set_multi_conf: 在运行过程中动态地修改多个配置
- get_plugin_handle: 获取插件的句柄
- get_plugin_default_kwargs: 获取插件的默认参数
- get_chat_handle: 获取简单聊天的句柄
- get_chat_default_kwargs: 获取简单聊天的默认参数
=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
"""
def get_plugin_handle(plugin_name):
    """
    e.g. plugin_name = 'crazy_functions.批量Markdown翻译->Markdown翻译指定语言'
    """
    import importlib
    assert (
        "->" in plugin_name
    ), "Example of plugin_name: crazy_functions.批量Markdown翻译->Markdown翻译指定语言"
    # The part before "->" is the module path, the part after is the function name
    module, fn_name = plugin_name.split("->")
    f_hot_reload = getattr(importlib.import_module(module, fn_name), fn_name)
    return f_hot_reload
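
# Illustrative usage of get_plugin_handle (a sketch; it assumes the resolved plugin
# accepts the keyword arguments produced by get_plugin_default_kwargs below and is a
# generator function, which is typical for crazy_functions plugins in this project):
#
#   plugin = get_plugin_handle("crazy_functions.批量Markdown翻译->Markdown翻译指定语言")
#   kwargs = get_plugin_default_kwargs()
#   for _ in plugin(**kwargs):
#       pass
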
def get_chat_handle():
    """
    Get chat function
    """
    from request_llms.bridge_all import predict_no_ui_long_connection

    return predict_no_ui_long_connection
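
# Illustrative usage of get_chat_handle (a sketch; it assumes a working api_key and
# llm_model are available through the cookies loaded by get_chat_default_kwargs below,
# and that predict_no_ui_long_connection blocks and returns the reply text):
#
#   chat = get_chat_handle()
#   kwargs = get_chat_default_kwargs()
#   kwargs["inputs"] = "What is 1+1?"
#   reply = chat(**kwargs)
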
def get_plugin_default_kwargs():
    """
    Get Plugin Default Arguments
    """
    from toolbox import ChatBotWithCookies, load_chat_cookies

    cookies = load_chat_cookies()
    llm_kwargs = {
        "api_key": cookies["api_key"],
        "llm_model": cookies["llm_model"],
        "top_p": 1.0,
        "max_length": None,
        "temperature": 1.0,
    }
    chatbot = ChatBotWithCookies(llm_kwargs)

    # Expected plugin call signature: txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port
    DEFAULT_FN_GROUPS_kwargs = {
        "main_input": "./README.md",
        "llm_kwargs": llm_kwargs,
        "plugin_kwargs": {},
        "chatbot_with_cookie": chatbot,
        "history": [],
        "system_prompt": "You are a good AI.",
        "web_port": None,
    }
    return DEFAULT_FN_GROUPS_kwargs
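
# Illustrative override of the plugin defaults (a sketch; "./docs/example.md" is a
# hypothetical path and "advanced_arg" is shown as an example plugin-specific key,
# both assumptions rather than guaranteed parts of this repository):
#
#   kwargs = get_plugin_default_kwargs()
#   kwargs["main_input"] = "./docs/example.md"
#   kwargs["plugin_kwargs"] = {"advanced_arg": "English"}
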
def get_chat_default_kwargs():
    """
    Get Chat Default Arguments
    """
    from toolbox import load_chat_cookies

    cookies = load_chat_cookies()
    llm_kwargs = {
        "api_key": cookies["api_key"],
        "llm_model": cookies["llm_model"],
        "top_p": 1.0,
        "max_length": None,
        "temperature": 1.0,
    }
    default_chat_kwargs = {
        "inputs": "Hello there, are you ready?",
        "llm_kwargs": llm_kwargs,
        "history": [],
        "sys_prompt": "You are AI assistant",
        "observe_window": None,
        "console_slience": False,  # NB: keyword spelled as expected by predict_no_ui_long_connection
    }
    return default_chat_kwargs
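

if __name__ == "__main__":
    # Minimal smoke test, a sketch assuming load_chat_cookies can supply a valid
    # api_key and llm_model so the call below reaches a real LLM backend.
    chat = get_chat_handle()
    chat_kwargs = get_chat_default_kwargs()
    chat_kwargs["inputs"] = "Say hello in one short sentence."
    print(chat(**chat_kwargs))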