import streamlit as st
from streamlit.logger import get_logger
from transformers import pipeline
import datasets
import pandas as pd
from huggingface_hub import login
LOGGER = get_logger(__name__)
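
# Fine-tuned classifier that maps a free-text question to a "siman seif" label in Orach Chaim;
# the login() call below authenticates with a hard-coded Hugging Face access token.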
model = "sivan22/halacha-siman-seif-classifier-new"
login('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')
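
# Load the Orach Chaim dataset; token=True reuses the credentials from the login() call above.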
ds = datasets.load_dataset('sivan22/orach-chaim',token=True)
df = ds['train'].to_pandas()

def clean(s) -> str:
    # Strip spaces so seif values can be compared against the predicted labels.
    return s.replace(" ", "")

df['seif'] = df['seif'].apply(clean)

def get_predicts_local(input_text) -> list:
    # Run the classifier locally; top_k=None returns scores for every label.
    classifier = pipeline("text-classification", model=model, top_k=None)
    predicts = classifier(input_text)
    return predicts

def get_predicts_online(input_text) -> list:
    # Query the hosted Inference API instead of loading the model locally.
    import requests
    API_URL = "https://api-inference.huggingface.co/models/" + model
    headers = {"Authorization": "Bearer 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"}

    def query(text):
        # Send a proper JSON payload; wait_for_model asks the API to block until the model is loaded.
        payload = {"inputs": text, "options": {"wait_for_model": True}}
        response = requests.post(API_URL, headers=headers, json=payload)
        return response.json()

    predicts = query(input_text)
    return predicts
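
# Both helpers are expected to return the text-classification output format consumed by run()
# below: a nested list of {'label': ..., 'score': ...} dicts, where each label encodes a
# space-separated "siman seif" pair.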

def run():
    st.set_page_config(
        page_title="Halacha classification",
        page_icon="",
    )
    st.write("# ื—ื™ืคื•ืฉ ื‘ืฉื•ืœื—ืŸ ืขืจื•ืš")  # "Search the Shulchan Aruch"
    use_local = st.checkbox("ื—ื™ืคื•ืฉ ืœื ืžืงื•ื•ืŸ")  # "Offline search": run the model locally
    user_input = st.text_input('ื›ืชื•ื‘ ื›ืืŸ ืืช ืฉืืœืชืš',  # "Write your question here"
                               placeholder='ื›ืžื” ื ืจื•ืช ืžื“ืœื™ืงื™ื ื‘ื—ื ื•ื›ื”')  # "How many candles are lit on Hanukkah"
    if st.button('ื—ืคืฉ') and user_input != "":  # "Search"
        get_predicts = get_predicts_local if use_local else get_predicts_online
        # print(get_predicts(user_input)[0][0:5])
        # Show the five highest-scoring predictions together with the matching dataset rows.
        for prediction in get_predicts(user_input)[0][:5]:
            rows = df[((df["bookname"] == " ืฉืœื—ืŸ ืขืจื•ืš - ืื•ืจื— ื—ื™ื™ื ") |
                       (df["bookname"] == " ืžืฉื ื” ื‘ืจื•ืจื”")) &
                      (df["siman"] == prediction['label'].split(' ')[0]) &
                      (df["seif"] == prediction['label'].split(' ')[1])]
            rows = rows.sort_values("bookname", ascending=False)
            print(prediction['label'].split(' '))  # debug output in the server log
            st.write('ืกื™ืžืŸ ' + str(prediction['label']),  # "Siman <label>"
                     rows[['text', 'sek', 'seif', 'siman', 'bookname']])

if __name__ == "__main__":
    run()
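
# Start the app locally with:
#   streamlit run app.py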