arunavsk1 committed
Commit 89c66bc
Parent(s): 7970241

Update app.py

Files changed (1):
  1. app.py +22 -21
app.py CHANGED
@@ -15,32 +15,33 @@ st.set_page_config(layout="wide")
 
  st.title("Demo for EIC NER")
 
- model_list = ['akdeniz27/bert-base-turkish-cased-ner',
-               'akdeniz27/convbert-base-turkish-cased-ner',
-               'akdeniz27/xlm-roberta-base-turkish-ner',
-               'xlm-roberta-large-finetuned-conll03-english']
+ model_list = ['/arunavsk1/my-awesome-pubmed-bert/'
+               # 'akdeniz27/convbert-base-turkish-cased-ner',
+               # 'akdeniz27/xlm-roberta-base-turkish-ner',
+               # 'xlm-roberta-large-finetuned-conll03-english'
+               ]
 
- st.sidebar.header("Select NER Model")
- model_checkpoint = st.sidebar.radio("", model_list)
+ # st.sidebar.header("Select NER Model")
+ # model_checkpoint = st.sidebar.radio("", model_list)
 
- st.sidebar.write("For details of models: 'https://huggingface.co/akdeniz27/")
- st.sidebar.write("")
+ # st.sidebar.write("For details of models: 'https://huggingface.co/akdeniz27/")
+ # st.sidebar.write("")
 
- xlm_agg_strategy_info = "'aggregation_strategy' can be selected as 'simple' or 'none' for 'xlm-roberta' because of the RoBERTa model's tokenization approach."
+ # xlm_agg_strategy_info = "'aggregation_strategy' can be selected as 'simple' or 'none' for 'xlm-roberta' because of the RoBERTa model's tokenization approach."
 
- st.sidebar.header("Select Aggregation Strategy Type")
- if model_checkpoint == "akdeniz27/xlm-roberta-base-turkish-ner":
-     aggregation = st.sidebar.radio("", ('simple', 'none'))
-     st.sidebar.write(xlm_agg_strategy_info)
- elif model_checkpoint == "xlm-roberta-large-finetuned-conll03-english":
-     aggregation = st.sidebar.radio("", ('simple', 'none'))
-     st.sidebar.write(xlm_agg_strategy_info)
-     st.sidebar.write("")
-     st.sidebar.write("This English NER model is included just to show the zero-shot transfer learning capability of XLM-Roberta.")
- else:
-     aggregation = st.sidebar.radio("", ('first', 'simple', 'average', 'max', 'none'))
+ # st.sidebar.header("Select Aggregation Strategy Type")
+ # if model_checkpoint == "akdeniz27/xlm-roberta-base-turkish-ner":
+ #     aggregation = st.sidebar.radio("", ('simple', 'none'))
+ #     st.sidebar.write(xlm_agg_strategy_info)
+ # elif model_checkpoint == "xlm-roberta-large-finetuned-conll03-english":
+ #     aggregation = st.sidebar.radio("", ('simple', 'none'))
+ #     st.sidebar.write(xlm_agg_strategy_info)
+ #     st.sidebar.write("")
+ #     st.sidebar.write("This English NER model is included just to show the zero-shot transfer learning capability of XLM-Roberta.")
+ # else:
+ #     aggregation = st.sidebar.radio("", ('first', 'simple', 'average', 'max', 'none'))
 
- st.sidebar.write("Please refer 'https://huggingface.co/transformers/_modules/transformers/pipelines/token_classification.html' for entity grouping with aggregation_strategy parameter.")
+ # st.sidebar.write("Please refer 'https://huggingface.co/transformers/_modules/transformers/pipelines/token_classification.html' for entity grouping with aggregation_strategy parameter.")
 
  st.subheader("Select Text Input Method")
  input_method = st.radio("", ('Select from Examples', 'Write or Paste New Text'))
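This hunk only swaps the model list and comments out the sidebar model/aggregation controls; the code that actually consumes the selected checkpoint lies outside the hunk. As a rough, hypothetical sketch of how the remaining app.py presumably wires the checkpoint into a Hugging Face token-classification pipeline (the example sentence and the default aggregation value are illustrative assumptions, not taken from the file):

# Minimal sketch, not the actual remainder of app.py: it assumes the app builds a
# transformers token-classification pipeline from the selected checkpoint and
# aggregation strategy, as the variable names in the removed code suggest.
from transformers import pipeline

# Verbatim from this commit; Hub model ids are normally written as "user/model"
# without surrounding slashes, so loading may require adjusting this string.
model_checkpoint = '/arunavsk1/my-awesome-pubmed-bert/'
aggregation = 'simple'   # one of 'first', 'simple', 'average', 'max', 'none'

ner = pipeline('token-classification',
               model=model_checkpoint,
               aggregation_strategy=aggregation)

# Illustrative biomedical sentence for a PubMed-domain NER model.
print(ner('Metformin is commonly prescribed for type 2 diabetes.'))

The aggregation_strategy parameter controls how sub-word tokens are grouped back into entity spans; see the token_classification pipeline documentation linked in the removed sidebar text.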