alexkueck committed on
Commit 732f75b
1 Parent(s): 01ade36

Update app.py

Files changed (1)
  1. app.py +32 -0
app.py CHANGED
 
@@ -243,6 +243,16 @@ def generate_prompt_with_history(text, history, max_length=4048):
     else:
         return None
 
+#Prompt and history for the OpenAI interface
+def generate_prompt_with_history_openai(prompt, history):
+    history_openai_format = []
+    for human, assistant in history:
+        history_openai_format.append({"role": "user", "content": human})
+        history_openai_format.append({"role": "assistant", "content": assistant})
+
+    history_openai_format.append({"role": "user", "content": prompt})
+    return history_openai_format
+
 
 ##############################################
 ##############################################
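The new generate_prompt_with_history_openai helper converts the Gradio-style history of (user, assistant) tuples into the message list expected by OpenAI-compatible chat endpoints. As a rough illustration, the list it returns could be passed straight to the chat completions API; the client setup and model name below are assumptions, not part of this commit:

from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

history = [("Hello!", "Hi, how can I help you?")]
messages = generate_prompt_with_history_openai("What is RAG?", history)
# messages is now:
# [{"role": "user", "content": "Hello!"},
#  {"role": "assistant", "content": "Hi, how can I help you?"},
#  {"role": "user", "content": "What is RAG?"}]

response = client.chat.completions.create(
    model="gpt-3.5-turbo",  # assumed model name
    messages=messages,
)
print(response.choices[0].message.content)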
 
@@ -270,6 +280,28 @@ def generate(text, history):
         top_p=0.9,
         temperature=0.6,
     )
+
+
+
+    try:
+        #only needs to be run once...
+        if not splittet:
+            splits = document_loading_splitting()
+            document_storage_chroma(splits)
+        #db = document_retrieval_chroma(llm, history_text_und_prompt)
+        #result = rag_chain(llm, history_text_und_prompt, db)
+
+    except Exception as e:
+        raise gr.Error(e)
+
+    #output the answer as a stream...
+    #for i in range(len(result)):
+        #time.sleep(0.05)
+        #yield result[: i+1]
+
+
+
+
     #for evaluation:
     # custom eli5 criteria
     custom_criterion = {"eli5": "Is the output explained in a way that a 5 year old would understand it?"}
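The second hunk wires a one-time ingestion step for retrieval-augmented generation into generate(): as long as the module-level splittet flag is not set, the documents are loaded, split into chunks, and stored in a Chroma vector store; the retrieval (document_retrieval_chroma) and rag_chain calls are still commented out. The commit does not show the helpers themselves; a minimal sketch of what they might look like with LangChain follows, where the file path, chunk sizes, and embedding model are purely illustrative assumptions:

# Hypothetical sketch of the two helpers called above, assuming LangChain + Chroma.
# The PDF path, chunk sizes, and embedding model are illustrative, not from the commit.
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Chroma

def document_loading_splitting():
    # load the source document and split it into overlapping chunks
    docs = PyPDFLoader("docs/source.pdf").load()  # assumed path
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    return splitter.split_documents(docs)

def document_storage_chroma(splits):
    # embed the chunks once and persist them in a local Chroma store
    embeddings = HuggingFaceEmbeddings()  # assumed embedding model
    Chroma.from_documents(splits, embeddings, persist_directory="chroma_db")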