Update app.py
Browse files
app.py
CHANGED
@@ -288,10 +288,10 @@ def invoke (prompt, history, rag_option, model_option, openai_api_key, temperat
|
|
288 |
global splittet
|
289 |
print(splittet)
|
290 |
#Prompt an history anhängen und einen Text daraus machen
|
291 |
-
|
292 |
|
293 |
#history für HuggingFace Models formatieren
|
294 |
- history_text_und_prompt = generate_prompt_with_history_hf(prompt, history)
|
295 |
|
296 |
#history für openAi formatieren
|
297 |
#history_text_und_prompt = generate_prompt_with_history_openai(prompt, history)
|
@@ -364,7 +364,7 @@ def vote(data: gr.LikeData):
|
|
364 |
|
365 |
additional_inputs = [
|
366 |
#gr.Radio(["Off", "Chroma", "MongoDB"], label="Retrieval Augmented Generation", value = "Off"),
|
367 |
- gr.Radio(["Aus", "An"], label="RAG - LI Erweiterungen", value = "
|
368 |
gr.Radio(["OpenAI", "HuggingFace"], label="Modellauswahl", value = "HuggingFace"),
|
369 |
gr.Textbox(label = "OpenAI API Key", value = "sk-", lines = 1),
|
370 |
gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05, interactive=True, info="Höhere Werte erzeugen diversere Antworten", visible=True),
|
|
|
288 |
global splittet
|
289 |
print(splittet)
|
290 |
#Prompt an history anhängen und einen Text daraus machen
|
291 |
+ history_text_und_prompt = generate_prompt_with_history(prompt, history)
|
292 |
|
293 |
#history für HuggingFace Models formatieren
|
294 |
+ #history_text_und_prompt = generate_prompt_with_history_hf(prompt, history)
|
295 |
|
296 |
#history für openAi formatieren
|
297 |
#history_text_und_prompt = generate_prompt_with_history_openai(prompt, history)
|
|
|
364 |
|
365 |
additional_inputs = [
|
366 |
#gr.Radio(["Off", "Chroma", "MongoDB"], label="Retrieval Augmented Generation", value = "Off"),
|
367 |
+ gr.Radio(["Aus", "An"], label="RAG - LI Erweiterungen", value = "Aus"),
|
368 |
gr.Radio(["OpenAI", "HuggingFace"], label="Modellauswahl", value = "HuggingFace"),
|
369 |
gr.Textbox(label = "OpenAI API Key", value = "sk-", lines = 1),
|
370 |
gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05, interactive=True, info="Höhere Werte erzeugen diversere Antworten", visible=True),
|