Update app.py
--- a/app.py
+++ b/app.py
@@ -136,7 +136,7 @@ def add_text(chatbot, history, prompt, file):
     if (ext == "png" or ext == "PNG" or ext == "jpg" or ext == "jpeg" or ext == "JPG" or ext == "JPEG"):
         chatbot = chatbot +[((file.name,), None), (prompt, None)]
     else:
-        chatbot = chatbot +[(get_filename(file) +"\n" + prompt, None)]
+        chatbot = chatbot +[("Hochgeladenes Dokument: "+ get_filename(file) +"\n" + prompt, None)]
 
     return chatbot, history, prompt, file, gr.Image(visible = False), "" #gr.Image( label=None, size=(30,30), visible=False, scale=1) #gr.Textbox(value="", interactive=False)
 
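The changed else branch above labels non-image uploads in the chat pane. A minimal sketch of that branch in isolation; both get_filename and the add_upload_to_chat wrapper below are illustrative stand-ins (the real get_filename is defined elsewhere in app.py and may differ):

```python
import os

def get_filename(file):
    # assumption: the actual helper in app.py may do more; here we only
    # take the base name of the Gradio upload's temp file
    return os.path.basename(file.name)

def add_upload_to_chat(chatbot, prompt, file, is_image):
    # images are added as a (filepath,) tuple plus the prompt; any other
    # document gets a "Hochgeladenes Dokument: <name>" label prepended
    if is_image:
        return chatbot + [((file.name,), None), (prompt, None)]
    return chatbot + [("Hochgeladenes Dokument: " + get_filename(file) + "\n" + prompt, None)]
```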
@@ -368,9 +368,9 @@ def generate_text_zu_doc(file, prompt, k, rag_option, chatbot, history, db):
     #für Chat LLM:
     #prompt_neu = generate_prompt_with_history_openai(neu_text_mit_chunks, history)
     #als reiner prompt:
-    prompt_neu = generate_prompt_with_history(neu_text_mit_chunks, history)
+    prompt_neu = generate_prompt_with_history(neu_text_mit_chunks, history)
 
-    result =
+    result = create_assistant_file(prompt_neu, file)
     return result
 
 
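create_assistant_file(prompt_neu, file), now assigned to result above, is defined elsewhere in app.py. Purely as a hedged sketch of what such a helper could look like against the OpenAI Assistants beta API; the client setup, model name, instructions, and polling loop below are assumptions, not the app's actual implementation:

```python
import time
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

def create_assistant_file_sketch(prompt, file):
    # upload the document so the assistant's retrieval tool can search it
    uploaded = client.files.create(file=open(file.name, "rb"), purpose="assistants")
    assistant = client.beta.assistants.create(
        model="gpt-4-1106-preview",  # assumed model name
        instructions="Answer questions about the uploaded document.",
        tools=[{"type": "retrieval"}],
        file_ids=[uploaded.id],
    )
    # one thread per request: post the flattened history/prompt text and run it
    thread = client.beta.threads.create()
    client.beta.threads.messages.create(thread_id=thread.id, role="user", content=prompt)
    run = client.beta.threads.runs.create(thread_id=thread.id, assistant_id=assistant.id)
    while run.status not in ("completed", "failed", "cancelled", "expired"):
        time.sleep(1)
        run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
    # messages are returned newest first; return the assistant's reply text
    messages = client.beta.threads.messages.list(thread_id=thread.id)
    return messages.data[0].content[0].text.value
```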
@@ -407,9 +407,9 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
         llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
         #Prompt an history anhängen und einen Text daraus machen
         if (rag_option == "An"):
-            history_text_und_prompt = generate_prompt_with_history(prompt, history)
+            history_text_und_prompt = generate_prompt_with_history(prompt, history)
         else:
-            history_text_und_prompt = generate_prompt_with_history_openai(prompt, history)
+            history_text_und_prompt = generate_prompt_with_history_openai(prompt, history)
     else:
         #oder an Hugging Face --------------------------
         print("HF Anfrage.......................")
@@ -419,7 +419,7 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
         #llm = HuggingFaceTextGenInference( inference_server_url="http://localhost:8010/", max_new_tokens=max_new_tokens,top_k=10,top_p=top_p,typical_p=0.95,temperature=temperature,repetition_penalty=repetition_penalty,)
         print("HF")
         #Prompt an history anhängen und einen Text daraus machen
-        history_text_und_prompt = generate_prompt_with_history(prompt, history)
+        history_text_und_prompt = generate_prompt_with_history(prompt, history)
 
     #zusätzliche Dokumenten Splits aus DB zum Prompt hinzufügen (aus VektorDB - Chroma oder Mongo DB)
     if (rag_option == "An"):
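Both the OpenAI branch and the Hugging Face branch above flatten the chat history and the new prompt into one text via generate_prompt_with_history (or its _openai variant). Those builders are defined elsewhere in app.py; a minimal sketch of the plain-text variant, assuming history is a list of (user, assistant) message pairs as kept by the Gradio chatbot state:

```python
def generate_prompt_with_history_sketch(text, history, max_chars=4000):
    # assumption: history is a list of (user_message, assistant_reply) tuples
    lines = []
    for user_msg, bot_msg in history:
        lines.append("User: " + user_msg)
        if bot_msg:
            lines.append("Assistant: " + bot_msg)
    lines.append("User: " + text)
    prompt = "\n".join(lines)
    # crude guard so the flattened history stays within a rough length budget
    return prompt[-max_chars:]
```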