Update app.py

app.py CHANGED
@@ -240,7 +240,7 @@ def generate_prompt_with_history_openai(prompt, history):
 ##############################################
 # generate function
 ##############################################
-def generate(text, history, rag_option, model_option,
+def generate(text, history, rag_option, model_option, temperature=0.5, max_new_tokens=4048, top_p=0.6, repetition_penalty=1.3):
     #mit RAG
     #später entsprechend mit Vektorstore...
     #context="Nuremberg is the second-largest city of the German state of Bavaria after its capital Munich, and its 541,000 inhabitants make it the 14th-largest city in Germany. On the Pegnitz River (from its confluence with the Rednitz in Fürth onwards: Regnitz, a tributary of the River Main) and the Rhine–Main–Danube Canal, it lies in the Bavarian administrative region of Middle Franconia, and is the largest city and the unofficial capital of Franconia. Nuremberg forms with the neighbouring cities of Fürth, Erlangen and Schwabach a continuous conurbation with a total population of 812,248 (2022), which is the heart of the urban area region with around 1.4 million inhabitants,[4] while the larger Nuremberg Metropolitan Region has approximately 3.6 million inhabitants. The city lies about 170 kilometres (110 mi) north of Munich. It is the largest city in the East Franconian dialect area."
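
The rewritten signature inlines explicit sampling defaults (temperature, max_new_tokens, top_p, repetition_penalty). The function body lies outside this hunk, so the following is only a minimal sketch of how such parameters are commonly forwarded to a Hugging Face text-generation backend; the client setup, model id, and prompt building are assumptions, not code from app.py.

# Hypothetical sketch -- generate()'s real body is not part of this diff.
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # assumed model id and client

def generate_sketch(text, history, rag_option, model_option,
                    temperature=0.5, max_new_tokens=4048, top_p=0.6, repetition_penalty=1.3):
    # Fold the chat history into a single prompt; a RAG context would be prepended here.
    turns = "".join(f"User: {u}\nAssistant: {a}\n" for u, a in history)
    prompt = turns + f"User: {text}\nAssistant:"
    # The added keyword arguments map directly onto the text-generation sampling options.
    return client.text_generation(
        prompt,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
    )
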
@@ -304,6 +304,7 @@ evaluator = load_evaluator("criteria", criteria="conciseness", llm=evaluation_ll
 ###############################################
 #Beschreibung oben in GUI
 ################################################
+print ("Start GUI")
 
 description = """<strong>Information:</strong> Hier wird ein <strong>Large Language Model (LLM)</strong> mit
 <strong>Retrieval Augmented Generation (RAG)</strong> auf <strong>externen Daten</strong> verwendet.\n\n
@@ -311,15 +312,9 @@ description = """<strong>Information:</strong> Hier wird ein <strong>Large Langu
 css = """.toast-wrap { display: none !important } """
 examples=[['Was ist ChtGPT-4?'],['schreibe ein Python Programm, dass die GPT-4 API aufruft.']]
 
-def vote(data: gr.LikeData):
-    if data.liked: print("You upvoted this response: " + data.value)
-    else: print("You downvoted this response: " + data.value)
-
 additional_inputs = [
-    #gr.Radio(["Off", "Chroma", "MongoDB"], label="Retrieval Augmented Generation", value = "Off"),
     gr.Radio(["Aus", "An"], label="RAG - LI Erweiterungen", value = "Aus"),
     gr.Radio(["OpenAI", "HuggingFace"], label="Modellauswahl", value = "HuggingFace"),
-    gr.Textbox(label = "OpenAI API Key", value = "sk-", lines = 1),
     gr.Slider(label="Temperature", value=0.65, minimum=0.0, maximum=1.0, step=0.05, interactive=True, info="Höhere Werte erzeugen diversere Antworten", visible=True),
     gr.Slider(label="Max new tokens", value=1024, minimum=0, maximum=4096, step=64, interactive=True, info="Maximale Anzahl neuer Tokens", visible=True),
     gr.Slider(label="Top-p (nucleus sampling)", value=0.6, minimum=0.0, maximum=1, step=0.05, interactive=True, info="Höhere Werte verwenden auch Tokens mit niedrigerer Wahrscheinlichkeit.", visible=True),
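
After this hunk, additional_inputs holds two Radio and three Slider components. gr.ChatInterface passes their values to fn positionally after the message and history, so their order has to line up with generate's extra parameters. A condensed illustration of that mapping (widget options abbreviated, mapping comments added here for clarity):

import gradio as gr

# ChatInterface appends these values, in order, after (text, history) when calling fn.
additional_inputs = [
    gr.Radio(["Aus", "An"], label="RAG - LI Erweiterungen", value="Aus"),             # -> rag_option
    gr.Radio(["OpenAI", "HuggingFace"], label="Modellauswahl", value="HuggingFace"),  # -> model_option
    gr.Slider(label="Temperature", value=0.65, minimum=0.0, maximum=1.0, step=0.05),  # -> temperature
    gr.Slider(label="Max new tokens", value=1024, minimum=0, maximum=4096, step=64),  # -> max_new_tokens
    gr.Slider(label="Top-p (nucleus sampling)", value=0.6, minimum=0.0, maximum=1.0, step=0.05),  # -> top_p
]

Note that vote() is removed in this hunk while chatbot_stream.like(vote, None, None) remains in the final hunk, so a vote definition presumably still exists elsewhere in app.py.
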
@@ -339,8 +334,9 @@ chat_interface_stream = gr.ChatInterface(fn=generate,
                 submit_btn = "Abschicken",
                 description = description)
 
+print("Build GUI")
 with gr.Blocks() as demo:
     with gr.Tab("Chatbot"):
         chatbot_stream.like(vote, None, None)
-
-
+        chat_interface_stream.queue().launch()
+
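
The final hunk wires up the Chatbot's like handler and launches the queued app from inside the Blocks/Tab context. Since the construction of chatbot_stream and chat_interface_stream is only partly visible in this diff, the sketch below shows the same pattern (Tab, like callback, queue().launch()) with a plain Textbox/Chatbot loop standing in for the streaming ChatInterface; all component names here are illustrative.

import gradio as gr

def vote(data: gr.LikeData):
    # Same behaviour as the vote() removed above: log which response was rated and how.
    if data.liked:
        print("You upvoted this response: " + str(data.value))
    else:
        print("You downvoted this response: " + str(data.value))

def respond(message, history):
    # Stand-in for generate(): append a canned answer to the chat history.
    history = history + [(message, "Echo: " + message)]
    return "", history

with gr.Blocks() as demo:
    with gr.Tab("Chatbot"):
        chatbot = gr.Chatbot()
        msg = gr.Textbox(label="Eingabe")
        msg.submit(respond, [msg, chatbot], [msg, chatbot])
        chatbot.like(vote, None, None)  # like/dislike handler, as in the diff

demo.queue().launch()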