tevykuch committed
Commit e9d731a
1 Parent(s): 573771c

Update app.py

Files changed (1)
app.py +20 -9
app.py CHANGED
@@ -1,19 +1,30 @@
-from transformers import AutoTokenizer, AutoModel
+from transformers import AutoModel
 import gradio as gr
+import fasttext
+import torch
+from huggingface_hub import hf_hub_download
+
+repo_id = "tevykuch/zeroshot-reptile"
+filename = "metalearn_wordy.bin"
+model_path = hf_hub_download(repo_id=repo_id, filename=filename)
+
+fasttext_model = fasttext.load_model(model_path)
+model = AutoModel.from_pretrained(repo_id)
+
 
 def predict(input_text):
-    inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True, max_length=512)
+    words = input_text.split()
+    embeddings = torch.tensor([fasttext_model.get_word_vector(word) for word in words])
+
+    avg_embedding = embeddings.mean(dim=0).unsqueeze(0)
+
     with torch.no_grad():
-        outputs = model(**inputs)
-
-    predictions = torch.softmax(outputs.logits, dim=-1)
-    predicted_class = predictions.argmax().item()
-
+        output = model(avg_embedding)
+        predicted_class = output.argmax(dim=1).item()
     return f"Predicted class: {predicted_class}"
 
-
 iface = gr.Interface(fn=predict,
-                     inputs=gr.inputs.Textbox(lines=2, placeholder="Type your text here..."),
+                     inputs="text",
                      outputs="text",
                      title="My Model Demo",
                      description="Enter some text to see the model prediction.")
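
For anyone trying the updated pipeline locally, a minimal sketch of the fastText embedding-averaging step that the new predict() relies on is shown below. It assumes only that the metalearn_wordy.bin checkpoint named in the diff is downloadable; the AutoModel head and the Gradio interface are left out, and an explicit iface.launch() call (not part of this commit) would typically still be needed when running the script directly.

# Sketch only: reproduces the embedding-averaging step from the updated predict().
# Assumes the fastText checkpoint referenced in the diff is available on the Hub.
import fasttext
import torch
from huggingface_hub import hf_hub_download

model_path = hf_hub_download(repo_id="tevykuch/zeroshot-reptile",
                             filename="metalearn_wordy.bin")
ft = fasttext.load_model(model_path)

def embed(text: str) -> torch.Tensor:
    # Average the per-word fastText vectors into a single (1, dim) tensor,
    # matching what the updated predict() feeds to the model.
    words = text.split()
    vectors = [ft.get_word_vector(w) for w in words]
    return torch.tensor(vectors).mean(dim=0).unsqueeze(0)

print(embed("hello world").shape)  # (1, dim), where dim is the fastText vector size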