gerardozq committed
Commit aab06b3
Parent: ff12521

Training in progress, step 500

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "monologg/biobert_v1.1_pubmed",
+  "_name_or_path": "gerardozq/biobert_v1.1_pubmed-finetuned-squad",
   "architectures": [
     "BertForQuestionAnswering"
   ],
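The only change here is _name_or_path, which now points at the fine-tuned repo rather than the monologg/biobert_v1.1_pubmed base model. A minimal loading sketch, assuming the checkpoint is public on the Hugging Face Hub (the model id is taken from the diff above):

from transformers import AutoModelForQuestionAnswering, AutoTokenizer

model_id = "gerardozq/biobert_v1.1_pubmed-finetuned-squad"  # id from the diff above
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForQuestionAnswering.from_pretrained(model_id)
print(model.config.architectures)  # ["BertForQuestionAnswering"], per config.json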
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9290987ed43b3493ee5025dbe0b495d7a5795ef48d2c0774a0aaa699cf10273e
+oid sha256:b2aba1c5b02414e58fa57c0b79b4d7255de62303ccb04bc8e9b1786989bc0948
 size 430968241
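This hunk swaps one Git LFS pointer for another: only the sha256 oid changes, while the size stays 430968241 bytes, i.e. the checkpoint was overwritten with new weights of identical size. A minimal sketch of parsing such a pointer file, assuming the checked-in three-line "key value" format from https://git-lfs.github.com/spec/v1 (the helper name is made up for illustration):

def read_lfs_pointer(path):
    # Parse the "key value" lines of a Git LFS pointer into a dict.
    with open(path, encoding="utf-8") as f:
        return dict(line.strip().split(" ", 1) for line in f if line.strip())

pointer = read_lfs_pointer("pytorch_model.bin")
print(pointer["oid"], pointer["size"])  # sha256:b2ab... 430968241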
runs/Oct17_01-53-31_3da5de560c47/1634435813.8840694/events.out.tfevents.1634435813.3da5de560c47.76.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a28511cb636902147425a640d8a5669cfc8d110916de6142251615e67d3f75b0
+size 4564
runs/Oct17_01-53-31_3da5de560c47/events.out.tfevents.1634435813.3da5de560c47.76.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:56e36314bf11e7a80bbd5911af0bdb3eff293971fe3c72eac4b54cab62e9bb47
+size 3387
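The two files added under runs/ are TensorBoard event logs that the Trainer writes during fine-tuning; typically one holds the run's hyperparameters and the other its scalar metrics. A minimal inspection sketch, assuming the tensorboard package is installed and the runs/ directory has been fetched with its LFS objects resolved:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the run directory added in this commit.
acc = EventAccumulator("runs/Oct17_01-53-31_3da5de560c47")
acc.Reload()  # read the event files from disk
print(acc.Tags()["scalars"])  # scalar tags logged up to step 500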
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "max_len": 512, "special_tokens_map_file": "/root/.cache/huggingface/transformers/ed9bbe6348755db01a526f9467c73a8c8f55a43191f892374c9ed386b4525997.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d", "name_or_path": "monologg/biobert_v1.1_pubmed", "do_basic_tokenize": true, "never_split": null, "tokenizer_class": "BertTokenizer"}
+{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "max_len": 512, "special_tokens_map_file": "/root/.cache/huggingface/transformers/ed9bbe6348755db01a526f9467c73a8c8f55a43191f892374c9ed386b4525997.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d", "name_or_path": "gerardozq/biobert_v1.1_pubmed-finetuned-squad", "do_basic_tokenize": true, "never_split": null, "tokenizer_class": "BertTokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d9f49c91413137d32df31aba04503a58768fb998896bb31f76685019035517fa
+oid sha256:b855cdec1158081599f2bcb71bb558dc6a908f9e597fb7246be95129d308b21f
 size 2863
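training_args.bin is the pickled TrainingArguments object that the Trainer saves alongside checkpoints; like the model weights, it is stored here as an LFS pointer. A minimal inspection sketch, assuming a compatible transformers version is importable (the pickle references its classes) and the file is trusted; on recent PyTorch, weights_only=False is needed because this is an arbitrary pickled object rather than a tensor checkpoint:

import torch

# Unpickle the saved TrainingArguments; only do this for files you trust.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.save_steps)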