# ZeroGPU-test / app.py
# Author: zhenyundeng
# Commit: c36717c ("update")
from fastapi import FastAPI
import uvicorn
import spaces
import torch
from pydantic import BaseModel
from transformers import RobertaTokenizer, RobertaForSequenceClassification
# Load the tokenizer and fact-check model once at startup.
# NOTE: loading must NOT be gated on torch.cuda.is_available() — on ZeroGPU
# Spaces the GPU is attached only inside @spaces.GPU-decorated calls, so CUDA
# is unavailable at import time and the old guard left `tokenizer`/`fc_model`
# undefined, producing a NameError on the first /predict/ request.
tokenizer = RobertaTokenizer.from_pretrained('Dzeniks/roberta-fact-check')
fc_model = RobertaForSequenceClassification.from_pretrained('Dzeniks/roberta-fact-check')
# FastAPI application instance; served by uvicorn in the __main__ guard below.
app = FastAPI()
# ------------------------------------------------------------------------
class Item(BaseModel):
    """Request body for the /predict/ endpoint."""

    claim: str     # statement whose truthfulness is being checked
    evidence: str  # text the claim is verified against
@app.post("/predict/")
@spaces.GPU
def fact_checking(item: Item):
    """Classify whether `item.evidence` supports `item.claim`.

    Runs the claim/evidence pair through the RoBERTa sequence-classification
    head and returns ``{"Verdict": label}``, where ``label`` is the argmax
    class index (presumably 0 = supported, 1 = refuted for this checkpoint —
    TODO confirm against the Dzeniks/roberta-fact-check model card).
    """
    # Encode claim and evidence as a sentence pair for the classifier.
    # (renamed from `input`, which shadowed the builtin)
    inputs = tokenizer.encode_plus(item.claim, item.evidence, return_tensors="pt")
    fc_model.eval()  # disable dropout for deterministic inference
    with torch.no_grad():  # inference only — no gradient bookkeeping
        outputs = fc_model(**inputs)
    label = torch.argmax(outputs[0]).item()
    return {"Verdict": label}
@app.get("/")
def greet_json():
    """Health-check endpoint returning a static JSON payload.

    The ``@spaces.GPU`` decorator was removed: this handler performs no GPU
    work, and requesting ZeroGPU hardware for a constant response wastes
    quota and adds per-request latency.
    """
    return {"Hello": "World!"}
# Run the ASGI server directly; port 7860 is the default exposed port on
# Hugging Face Spaces.
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)
# if __name__ == "__main__":
# item = {
# "claim": "Albert Einstein work in the field of computer science.",
# "evidence": "Albert Einstein was a German-born theoretical physicist, widely acknowledged to be one of the greatest and most influential physicists of all time.",
# }
#
# results = fact_checking(item)
#
# print(results)