# Hugging Face Space: C1-level text classification demo (Gradio app).
import gradio as gr
import os
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification
import json
import socket
from datetime import datetime
import huggingface_hub
from huggingface_hub import Repository
import os
# --- Configuration & model loading ----------------------------------------
# NOTE(review): "ACCES_TOKEN" looks like a typo for "ACCESS_TOKEN", but it
# must match the secret name configured in the Space settings, so keep it.
access_token = os.environ['ACCES_TOKEN']

# Dataset repo used to log usage records (see prozesatu()).
DATASET_REPO_URL = "https://huggingface.co/datasets/EkhiAzur/Demoko_informazioa"
DATA_FILENAME = "Erabiltzaileak.txt"
DATA_FILE = os.path.join("data", DATA_FILENAME)

# Private sequence-classification model + tokenizer (gated by access_token).
model = AutoModelForSequenceClassification.from_pretrained(
    "EkhiAzur/C1_Sailkapen_Demoa", token=access_token
)
tokenizer = AutoTokenizer.from_pretrained(
    "EkhiAzur/C1_Sailkapen_Demoa",
    token=access_token,
    use_fast=True,
    add_prefix_space=True,
)
classifier = pipeline(
    "text-classification",
    tokenizer=tokenizer,
    model=model,
    max_length=512,
    padding=True,
    truncation=True,
    batch_size=1,
)

# Example texts shown in the UI. FIX: the original used a bare
# json.load(open(...)) which leaks the file handle; use a context manager
# and an explicit encoding.
with open("./Adibideak.json", "r", encoding="utf-8") as adibideak_file:
    adibideak = json.load(adibideak_file)
def prozesatu(Testua, request: gr.Request):
    """Classify *Testua* with the C1-level model and log the request.

    Side effects: clones/updates the logging dataset repo into ``data/``,
    appends "IP + timestamp" to DATA_FILE and pushes it to the Hub.

    Returns a dict mapping the two UI labels ("Gai" / "Ez gai") to
    complementary confidence scores, as expected by ``gr.Label``.
    """
    repo = Repository(
        local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=access_token
    )

    # Work out the client IP. The Space runs behind a proxy, so prefer the
    # X-Forwarded-For header over the direct connection address.
    client_ip = request.client.host
    headers = request.kwargs['headers']
    if headers and 'x-forwarded-for' in headers:
        x_forwarded_for = headers['x-forwarded-for']
        # BUG FIX: X-Forwarded-For is comma-separated ("client, proxy1, ...").
        # The original split on spaces, which left a trailing comma on the
        # first entry. Also removed: an unused local_ip computed via
        # socket.gethostbyname(socket.gethostbyname("")) -- itself a bug,
        # presumably meant to be socket.gethostname() -- and a debug print.
        client_ip = x_forwarded_for.split(',')[0].strip() if x_forwarded_for else ""

    # Timestamp for the usage record.
    now = datetime.now()

    # Append the record; the context manager guarantees the handle is closed
    # even if the write raises (original used open/write/close).
    with open(DATA_FILE, "a") as f:
        f.write(f'Erabiltzailea: {client_ip}. Eguna eta ordua: {now}.\n')
    repo.push_to_hub()

    # classifier is attached as a function attribute after definition.
    prediction = prozesatu.classifier(Testua)[0]
    if prediction["label"] == "GAI":
        return {"Gai": prediction["score"], "Ez gai": 1 - prediction["score"]}
    return {"Gai": 1 - prediction["score"], "Ez gai": prediction["score"]}
def testua_itzuli(testua):
if testua not in testua_itzuli.adibideak:
return ""
return testua_itzuli.adibideak[testua]
# Attach shared state as function attributes so the Gradio callbacks can
# reach the examples mapping and the classification pipeline without
# referencing module globals directly.
prozesatu.adibideak = adibideak
prozesatu.classifier = classifier
testua_itzuli.adibideak = adibideak
def ezabatu(Testua):
    """Clear the input textbox: ignore *Testua* and return an empty string."""
    return ""
# --- UI definition ---------------------------------------------------------
# Layout: a textbox with Bidali/Ezabatu buttons on the left, the two-class
# label display on the right, plus cached clickable examples.
# FIX: removed a stray " |" fused onto the final line (scraping artifact,
# syntax error) and renamed "input" so it no longer shadows the builtin.
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            input_box = gr.Textbox(label="Testua")
            with gr.Row():
                bidali_btn = gr.Button("Bidali")
                ezabatu_btn = gr.Button("Ezabatu")
        label = gr.Label(num_top_classes=2, label="C1 maila")
    bidali_btn.click(fn=prozesatu, inputs=input_box, outputs=label)
    ezabatu_btn.click(fn=ezabatu, inputs=input_box, outputs=input_box)
    gr.Examples(
        list(adibideak.keys()),
        inputs=input_box,
        outputs=input_box,
        label="Adibideak:",
        fn=testua_itzuli,
        cache_examples=True,
    )

demo.launch()