09util committed
Commit 11f6c3a
1 Parent(s): 7d572d2

Create app.py

Files changed (1)
  1. app.py +25 -0
app.py ADDED
@@ -0,0 +1,25 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+
+ model_name = "microsoft/Phi-3.5-MoE-instruct"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ def generate_response(instruction):
+     input_text = f"Human: {instruction}\n\nAssistant:"
+     inputs = tokenizer(input_text, return_tensors="pt")
+
+     outputs = model.generate(**inputs, max_length=200, num_return_sequences=1)
+     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+     return response.split("Assistant:")[-1].strip()
+
+ iface = gr.Interface(
+     fn=generate_response,
+     inputs="text",
+     outputs="text",
+     title="Phi-3.5-MoE-instruct Demo",
+     description="Enter an instruction or question to get a response from the model."
+ )
+
+ iface.launch()
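
The committed app.py loads the checkpoint with default settings and formats the prompt as a plain "Human:/Assistant:" string. Below is a minimal sketch, not part of the commit, of two common adjustments: loading the large MoE checkpoint in bfloat16 spread across available devices (this assumes a CUDA-capable machine with the accelerate package installed), and building the prompt with the tokenizer's chat template instead of a hand-written prefix (this assumes the Phi-3.5-MoE-instruct tokenizer defines a chat template; older transformers releases may additionally need trust_remote_code=True for this checkpoint).

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "microsoft/Phi-3.5-MoE-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.bfloat16,  # half-precision weights instead of default fp32
    device_map="auto",           # let accelerate place layers on the available devices
)

def generate_response(instruction):
    # Single-turn conversation formatted with the model's own chat template.
    messages = [{"role": "user", "content": instruction}]
    input_ids = tokenizer.apply_chat_template(
        messages,
        add_generation_prompt=True,  # append the assistant-turn marker
        return_tensors="pt",
    ).to(model.device)
    outputs = model.generate(input_ids, max_new_tokens=200)
    # Decode only the newly generated tokens, skipping the echoed prompt.
    return tokenizer.decode(outputs[0, input_ids.shape[-1]:], skip_special_tokens=True).strip()

With the prompt handled by the template, there is no need to split the decoded text on "Assistant:", and the function can be passed to gr.Interface exactly as in the commit above.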