import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification

# Hugging Face access token used to load the model
access_token = "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"

# Sequence-classification model and its tokenizer
model = AutoModelForSequenceClassification.from_pretrained("EkhiAzur/RoBERTA_3", token=access_token)

tokenizer = AutoTokenizer.from_pretrained(
    "EkhiAzur/RoBERTA_3",
    token=access_token,
    use_fast=True,
    add_prefix_space=True,
)

# Text-classification pipeline; inputs longer than 512 tokens are truncated
classifier = pipeline(
    "text-classification",
    tokenizer=tokenizer,
    model=model,
    max_length=512,
    padding=True,
    truncation=True,
    batch_size=1,
)

def prozesatu(Testua, request: gr.Request):
    # Classify the input text and return the probability of each label
    # ("Gai" = apt, "Ez gai" = not apt).
    #return str(request.headers["accept-language"])
    prediction = prozesatu.classifier(Testua)[0]
    if prediction["label"] == "GAI":
        return {"Gai": prediction["score"], "Ez gai": 1 - prediction["score"]}
    else:
        return {"Gai": 1 - prediction["score"], "Ez gai": prediction["score"]}
    #return 'C1:{}. Probabilitatea:{:.2f}'.format(prediction["label"], round(prediction["score"], 2))

# Attach the pipeline to the function so it is looked up at call time
prozesatu.classifier = classifier

demo = gr.Interface(
    fn=prozesatu,
    inputs=gr.Textbox(label="Testua", placeholder="Idatzi hemen testua..."),  # "Write the text here..."
    outputs="label",
    #interpretation="default",
    #examples=[["Gaur egungo teknologiak bikainak dira..."]]
).launch()

#gr.Interface(fn=prozesatu, inputs="text", outputs="text").launch()