Loewolf committed on
Commit 4ff8e05
1 Parent(s): f863056

Update app.py

Files changed (1): app.py +27 -1
app.py CHANGED
@@ -1,3 +1,29 @@
+from transformers import GPT2LMHeadModel, GPT2Tokenizer
+import torch
 import gradio as gr
 
-gr.Interface.load("models/Loewolf/GPT_1").launch()
+# Load the model and tokenizer
+model = GPT2LMHeadModel.from_pretrained("Loewolf/GPT_1")
+tokenizer = GPT2Tokenizer.from_pretrained("Loewolf/GPT_1")
+
+# A function for asking GPT-2 questions
+def ask_gpt2(question, history):
+    input_ids = tokenizer.encode(history + question, return_tensors="pt")
+    attention_mask = torch.ones(input_ids.shape, dtype=torch.bool)
+
+    # Generate a reply
+    output = model.generate(input_ids, attention_mask=attention_mask)
+    reply = tokenizer.decode(output[0], skip_special_tokens=True)
+    new_history = history + "Nutzer: " + question + "\nLöwolf GPT: " + reply + "\n"
+    return new_history
+
+# Build the Gradio interface
+interface = gr.Interface(
+    fn=ask_gpt2,
+    inputs=[gr.inputs.Textbox(lines=2, placeholder="Stelle deine Frage hier..."), gr.inputs.Textbox(lines=10, placeholder="Chat-Verlauf...")],
+    outputs=gr.outputs.Textbox(label="Antwort"),
+    layout="vertical"
+)
+
+# Launch the Gradio app
+interface.launch()
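
For reference, a minimal sketch of the same chat wrapper written against the current Gradio API: the gr.inputs/gr.outputs namespaces and the layout argument were removed in later Gradio releases, so components are passed directly. The generation settings (max_new_tokens, pad_token_id) and the torch.ones_like attention mask are assumptions for illustration, not part of this commit.

from transformers import GPT2LMHeadModel, GPT2Tokenizer
import torch
import gradio as gr

# Load the model and tokenizer once at startup
model = GPT2LMHeadModel.from_pretrained("Loewolf/GPT_1")
tokenizer = GPT2Tokenizer.from_pretrained("Loewolf/GPT_1")

def ask_gpt2(question, history):
    # Encode the running chat history plus the new question
    input_ids = tokenizer.encode(history + question, return_tensors="pt")
    attention_mask = torch.ones_like(input_ids)

    # Generation settings here are assumed defaults, not taken from the commit
    output = model.generate(
        input_ids,
        attention_mask=attention_mask,
        max_new_tokens=50,
        pad_token_id=tokenizer.eos_token_id,
    )
    reply = tokenizer.decode(output[0], skip_special_tokens=True)
    # Return the full updated history as a single string, as in the commit
    return history + "Nutzer: " + question + "\nLöwolf GPT: " + reply + "\n"

# Components are passed directly; gr.inputs/gr.outputs no longer exist in Gradio 4.x
interface = gr.Interface(
    fn=ask_gpt2,
    inputs=[
        gr.Textbox(lines=2, placeholder="Stelle deine Frage hier..."),
        gr.Textbox(lines=10, placeholder="Chat-Verlauf..."),
    ],
    outputs=gr.Textbox(label="Antwort"),
)

interface.launch()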