Spaces: Runtime error
update app.py
app.py CHANGED
@@ -1,27 +1,27 @@
-from transformers import pipeline
+#from transformers import pipeline
 import gradio as gr
 
-from transformers import AutoTokenizer, AutoModelForCausalLM
-
-
-#MODEL_DIRECTORY = "/models/mrm8488-t5-base-finetuned-emotion"
-tokenizer = AutoTokenizer.from_pretrained("tuner007/pegasus_paraphrase", use_fast=False)
-#if not path.exists(MODEL_DIRECTORY):
-model = AutoModelForCausalLM.from_pretrained("tuner007/pegasus_paraphrase")
-# model.save_pretrained(MODEL_DIRECTORY)
-#else:
-# model = AutoModelWithLMHead.from_pretrained(MODEL_DIRECTORY)
-
+#from transformers import AutoTokenizer, AutoModelForCausalLM
+##from os import path
+
+##MODEL_DIRECTORY = "/models/mrm8488-t5-base-finetuned-emotion"
+#tokenizer = AutoTokenizer.from_pretrained("tuner007/pegasus_paraphrase", use_fast=False)
+##if not path.exists(MODEL_DIRECTORY):
+#model = AutoModelForCausalLM.from_pretrained("tuner007/pegasus_paraphrase")
+## model.save_pretrained(MODEL_DIRECTORY)
+##else:
+## model = AutoModelWithLMHead.from_pretrained(MODEL_DIRECTORY)
+#
 
 def get_emotion(text):
-    input_ids = tokenizer.encode(text + '</s>', return_tensors='pt')
-
-
-    # print(output)
-    dec = [tokenizer.decode(ids) for ids in output]
-    print(dec)
-    label = dec[0]
-    return
+# input_ids = tokenizer.encode(text + '</s>', return_tensors='pt')
+# output = model.generate(input_ids=input_ids, max_length=2)
+#
+# # print(output)
+# dec = [tokenizer.decode(ids) for ids in output]
+# print(dec)
+# label = dec[0]
+    return text
 
 
 
@@ -31,8 +31,7 @@ def parph(name= "paraphrase: This is something which I cannt understand at all."
     ##model_name = 'tuner007/pegasus_paraphrase'
     #text2text = pipeline('text2text-generation', model = "Vamsi/T5_Paraphrase_Paws")
     ##text2text(name)
-    test = get_emotion(
-    )
+    test = get_emotion(name)
     return test # text2text(name)
 
 
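
After this commit, get_emotion simply echoes its input, so the Space returns the text unchanged. For reference, below is a minimal sketch of the paraphrase call the commented-out code appears to aim at, assuming the tuner007/pegasus_paraphrase checkpoint and the standard encoder-decoder loading path (AutoModelForSeq2SeqLM rather than AutoModelForCausalLM); the function name paraphrase and the generation settings are illustrative, not taken from the commit.

# Illustrative sketch only (not part of the commit): assumes the transformers,
# torch and gradio packages are installed and the checkpoint can be downloaded.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr

model_name = "tuner007/pegasus_paraphrase"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Pegasus is an encoder-decoder model, so it is loaded as a seq2seq LM here.
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

def paraphrase(text):
    # Tokenize the input and generate a single paraphrase with beam search.
    batch = tokenizer([text], truncation=True, padding="longest",
                      max_length=60, return_tensors="pt")
    generated = model.generate(**batch, max_length=60, num_beams=5,
                               num_return_sequences=1)
    return tokenizer.batch_decode(generated, skip_special_tokens=True)[0]

# Hypothetical Gradio wiring for a single text-in, text-out interface.
gr.Interface(fn=paraphrase, inputs="text", outputs="text").launch()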