binqiangliu committed
Commit • 98ce940
Parent(s): dbda485
Update app.py
app.py CHANGED
@@ -23,7 +23,7 @@ with open(css_file) as f:
 st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
 
 HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
-repo_id=os.getenv("
+repo_id=os.getenv("LLM_RepoID")
 model_name=os.getenv("model_name")
 
 documents=[]
@@ -35,6 +35,19 @@ def generate_random_string(length):
 random_string = generate_random_string(20)
 directory_path=random_string
 
+print(f"Defining the function that handles the redundant Context text")
+def remove_context(text):
+    # Check whether 'Context:' exists
+    if 'Context:' in text:
+        # Find the position of the first '\n\n'
+        end_of_context = text.find('\n\n')
+        # Remove the part from 'Context:' up to the first '\n\n'
+        return text[end_of_context + 2:]  # '+2' skips the two newline characters
+    else:
+        # If 'Context:' does not exist, return the original text
+        return text
+print(f"Finished defining the function that handles the redundant Context text")
+
 st.sidebar.markdown(
 """
 <style>
@@ -87,8 +100,8 @@ with st.sidebar:
 embed_model = LangchainEmbedding(HuggingFaceEmbeddings(model_name=model_name))
 
 llm = HuggingFaceHub(repo_id=repo_id,
-                     model_kwargs={"min_length":
-                     "max_new_tokens":
+                     model_kwargs={"min_length":512,
+                     "max_new_tokens":1024, "do_sample":True,
                      "temperature":0.1,
                      "top_k":50,
                      "top_p":0.95, "eos_token_id":49155})
@@ -110,9 +123,11 @@ if question !="" and not question.strip().isspace() and not question == "" and n
 loadedindex = load_index_from_storage(storage_context=storage_context, service_context=service_context)
 query_engine = loadedindex.as_query_engine()
 initial_response = query_engine.query(question)
+cleaned_initial_ai_response=str(initial_response)
+final_ai_response = cleaned_initial_ai_response.split('<|end|>\n<|system|>\n<|end|>\n<|user|>')[0].strip().replace('\n\n', '\n').replace('<|end|>', '').replace('<|user|>', '').replace('<|system|>', '').replace('<|assistant|>', '')
 #temp_ai_response=str(initial_response)
 #final_ai_response=temp_ai_response.partition('<|end|>')[0]
-st.write("AI Response:\n\n"+
+st.write("AI Response:\n\n"+final_ai_response)
 #else:
 # print("Check the Checkbox to get AI Response.")
 # sys.exit()
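
For reference, a minimal standalone sketch of the remove_context helper added in this commit. The sample input string is invented for illustration; only the helper itself comes from the diff above.

def remove_context(text):
    # Check whether 'Context:' exists
    if 'Context:' in text:
        # Drop everything from the start of the text up to the first '\n\n'
        end_of_context = text.find('\n\n')
        return text[end_of_context + 2:]  # '+2' skips the two newline characters
    # If 'Context:' does not exist, return the original text
    return text

sample = "Context: retrieved passage goes here\n\nParis is the capital of France."
print(remove_context(sample))  # prints: Paris is the capital of France.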
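Likewise, a hedged sketch of the new response post-processing step: the query-engine response is converted to a plain string and chat-template markers are stripped before display. The raw response text below is made up; the split pattern and replace chain mirror the committed lines.

raw = "Paris.<|end|>\n<|system|>\n<|end|>\n<|user|>next turn"
cleaned = str(raw)
# Keep only the first assistant turn, then strip any leftover template tokens
final = cleaned.split('<|end|>\n<|system|>\n<|end|>\n<|user|>')[0].strip()
final = (final.replace('\n\n', '\n').replace('<|end|>', '')
              .replace('<|user|>', '').replace('<|system|>', '')
              .replace('<|assistant|>', ''))
print("AI Response:\n\n" + final)  # prints "AI Response:" followed by the cleaned answer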
|