Update app.py (#2)
Update app.py (8a1d1fcc38c182381df6e3ae64c149247786ddb5)
Co-authored-by: Joseph Pollack <[email protected]>
app.py CHANGED
@@ -1,13 +1,18 @@
 import streamlit as st
 import pandas as pd
-from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
+from transformers import pipeline, AutoConfig, AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM, MistralForCausalLM
+from peft import PeftModel, PeftConfig
 
 #Note this should be used always in compliance with applicable laws and regulations if used with real patient data.
 
-#
-
-tokenizer =
-
+# Instantiate the Tokenizer
+tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1", trust_remote_code=True, padding_side="left")
+tokenizer.pad_token = tokenizer.eos_token
+tokenizer.padding_side = 'left'
+# Load the PEFT model
+peft_config = PeftConfig.from_pretrained("pseudolab/K23_MiniMed")
+peft_model = MistralForCausalLM.from_pretrained("https://huggingface.co/HuggingFaceH4/zephyr-7b-beta", trust_remote_code=True)
+peft_model = PeftModel.from_pretrained(peft_model, "pseudolab/K23_MiniMed")
 
 #Upload Patient Data
 uploaded_file = st.file_uploader("Choose a CSV file", type="csv")
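For reference, a minimal sketch of how the new loading block is presumably meant to run outside the Space. Two assumptions are flagged here rather than taken from the committed code: from_pretrained expects a repo id or local path, not a full https:// URL, so the sketch passes "HuggingFaceH4/zephyr-7b-beta" directly; and "pseudolab/K23_MiniMed" is treated as a PEFT adapter trained on top of that base model, as the diff implies. The Mistral-7B-v0.1 tokenizer is kept as committed; Zephyr is a Mistral fine-tune, so its vocabulary should be compatible.

# Sketch only, under the assumptions above: base weights from the
# "HuggingFaceH4/zephyr-7b-beta" repo id, "pseudolab/K23_MiniMed" as a PEFT adapter.
import torch
from transformers import AutoTokenizer, MistralForCausalLM
from peft import PeftModel

BASE_MODEL = "HuggingFaceH4/zephyr-7b-beta"   # repo id, not "https://huggingface.co/..."
ADAPTER_ID = "pseudolab/K23_MiniMed"          # PEFT adapter referenced in the diff

# Tokenizer configured for left padding with eos as the pad token, as in the diff
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1", padding_side="left")
tokenizer.pad_token = tokenizer.eos_token

# Load the base causal LM, then attach the PEFT adapter weights on top of it
base_model = MistralForCausalLM.from_pretrained(BASE_MODEL, torch_dtype=torch.float16)
model = PeftModel.from_pretrained(base_model, ADAPTER_ID)
model.eval()

# Quick smoke test: generate a short continuation from a dummy prompt
inputs = tokenizer("Patient presents with", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=20, pad_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))

The separate PeftConfig.from_pretrained call in the diff is not strictly needed for inference, since PeftModel.from_pretrained reads the adapter configuration from the hub itself.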