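# MedBotDash: a Streamlit dashboard that runs the Clinical-AI-Apollo/Medical-NER
# model over a free-text condition description and groups the extracted entities
# (age, sex, signs/symptoms, severity, biological structure, lab values) for display.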
from transformers import pipeline
import streamlit as st

col_left, col_middle, col_right = st.columns(3)
col_middle.title("MedBotDash")
st.divider()

@st.cache_resource
def load_model():
    # Build the medical NER pipeline once; st.cache_resource reuses it across reruns.
    model = pipeline(
        "token-classification",
        model="Clinical-AI-Apollo/Medical-NER",
        aggregation_strategy="simple",
    )
    return model

pipe = load_model()
st.subheader("Enter a detailed description of your condition")
condition = st.text_input("Enter the condition")
# Run NER only when the user has actually entered text.
data = pipe(condition) if condition else []
# st.write(data)  # uncomment to inspect the raw NER output
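# With aggregation_strategy="simple", each entry of `data` is a dict such as
# {'entity_group': 'SIGN_SYMPTOM', 'score': 0.98, 'word': 'headache', 'start': 5, 'end': 13}
# (illustrative values; the keys follow the transformers token-classification pipeline).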
severity = []
sign_symptom = []
biological_structure = []
age = []
sex = []
lab_value = []
# Append each word to its list based on the predicted entity_group.
for entity in data:
    if entity['entity_group'] == 'SEVERITY':
        severity.append(entity['word'])
    elif entity['entity_group'] == 'SIGN_SYMPTOM':
        sign_symptom.append(entity['word'])
    elif entity['entity_group'] == 'BIOLOGICAL_STRUCTURE':
        biological_structure.append(entity['word'])
    elif entity['entity_group'] == 'AGE':
        age.append(entity['word'])
    elif entity['entity_group'] == 'SEX':
        sex.append(entity['word'])
    elif entity['entity_group'] == 'LAB_VALUE':
        lab_value.append(entity['word'])
# Show the first detected age and sex, falling back to 'NA'.
col1, col2 = st.columns(2)
col1.metric("Age", age[0] if age else 'NA')
col2.metric("Sex", sex[0] if sex else 'NA')
st.divider()
# Deduplicate the collected entities before display.
sign_symptom = set(sign_symptom)
severity = set(severity)
biological_structure = set(biological_structure)
age = set(age)
sex = set(sex)
lab_value = set(lab_value)
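# Note: sets are unordered, so the entity order shown in the tabs below may vary between runs.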
tab1, tab2, tab3 = st.tabs(["Signs", "Biological Structure", "Severity"])
with tab1:
    for sign in sign_symptom:
        st.text(sign)
with tab2:
    for bio in biological_structure:
        st.text(bio)
with tab3:
    for sev in severity:
        st.text(sev)
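# To run locally (assuming this file is saved as app.py): streamlit run app.py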