blaze999 committed
Commit 8df83de (parent 2f07574)

Training complete
README.md ADDED
@@ -0,0 +1,56 @@
+ ---
+ license: mit
+ base_model: microsoft/deberta-v3-base
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: ner-wand-test
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # ner-wand-test
+
+ This model is a fine-tuned version of [microsoft/deberta-v3-base](https://huggingface.co/microsoft/deberta-v3-base) on the None dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 2e-05
+ - train_batch_size: 8
+ - eval_batch_size: 16
+ - seed: 42
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 16
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 30
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.37.0
+ - Pytorch 2.1.2
+ - Datasets 2.1.0
+ - Tokenizers 0.15.1
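
As a rough guide to reproducing this configuration, the sketch below maps the hyperparameters listed in the card onto 🤗 Transformers `TrainingArguments`. The label list, dataset wiring, and `output_dir` are illustrative placeholders rather than values taken from this repository, and the Adam betas/epsilon shown in the card are simply the library defaults.

```python
from transformers import (
    AutoModelForTokenClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

# Hypothetical label list; the real checkpoint uses the 83-tag scheme in config.json.
label_list = ["O", "B-SIGN_SYMPTOM", "I-SIGN_SYMPTOM"]

tokenizer = AutoTokenizer.from_pretrained("microsoft/deberta-v3-base")
model = AutoModelForTokenClassification.from_pretrained(
    "microsoft/deberta-v3-base", num_labels=len(label_list)
)

args = TrainingArguments(
    output_dir="ner-wand-test",        # placeholder output path
    learning_rate=2e-5,
    per_device_train_batch_size=8,     # train_batch_size: 8
    per_device_eval_batch_size=16,     # eval_batch_size: 16
    gradient_accumulation_steps=2,     # effective train batch size of 16
    seed=42,
    lr_scheduler_type="cosine",
    warmup_ratio=0.1,
    num_train_epochs=30,
    fp16=True,                         # mixed_precision_training: Native AMP
)

# Dataset preparation is not part of this repository; wire in your own
# token-classification dataset before enabling the Trainer:
# trainer = Trainer(model=model, args=args, train_dataset=..., eval_dataset=..., tokenizer=tokenizer)
# trainer.train()
```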
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "[MASK]": 128000
+ }
config.json ADDED
@@ -0,0 +1,205 @@
+ {
+   "_name_or_path": "microsoft/deberta-v3-base",
+   "architectures": [
+     "DebertaV2ForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "O",
+     "1": "B-ACTIVITY",
+     "2": "I-ACTIVITY",
+     "3": "I-ADMINISTRATION",
+     "4": "B-ADMINISTRATION",
+     "5": "B-AGE",
+     "6": "I-AGE",
+     "7": "I-AREA",
+     "8": "B-AREA",
+     "9": "B-BIOLOGICAL_ATTRIBUTE",
+     "10": "I-BIOLOGICAL_ATTRIBUTE",
+     "11": "I-BIOLOGICAL_STRUCTURE",
+     "12": "B-BIOLOGICAL_STRUCTURE",
+     "13": "B-CLINICAL_EVENT",
+     "14": "I-CLINICAL_EVENT",
+     "15": "B-COLOR",
+     "16": "I-COLOR",
+     "17": "I-COREFERENCE",
+     "18": "B-COREFERENCE",
+     "19": "B-DATE",
+     "20": "I-DATE",
+     "21": "I-DETAILED_DESCRIPTION",
+     "22": "B-DETAILED_DESCRIPTION",
+     "23": "I-DIAGNOSTIC_PROCEDURE",
+     "24": "B-DIAGNOSTIC_PROCEDURE",
+     "25": "I-DISEASE_DISORDER",
+     "26": "B-DISEASE_DISORDER",
+     "27": "B-DISTANCE",
+     "28": "I-DISTANCE",
+     "29": "B-DOSAGE",
+     "30": "I-DOSAGE",
+     "31": "I-DURATION",
+     "32": "B-DURATION",
+     "33": "I-FAMILY_HISTORY",
+     "34": "B-FAMILY_HISTORY",
+     "35": "B-FREQUENCY",
+     "36": "I-FREQUENCY",
+     "37": "I-HEIGHT",
+     "38": "B-HEIGHT",
+     "39": "B-HISTORY",
+     "40": "I-HISTORY",
+     "41": "I-LAB_VALUE",
+     "42": "B-LAB_VALUE",
+     "43": "I-MASS",
+     "44": "B-MASS",
+     "45": "I-MEDICATION",
+     "46": "B-MEDICATION",
+     "47": "I-NONBIOLOGICAL_LOCATION",
+     "48": "B-NONBIOLOGICAL_LOCATION",
+     "49": "I-OCCUPATION",
+     "50": "B-OCCUPATION",
+     "51": "B-OTHER_ENTITY",
+     "52": "I-OTHER_ENTITY",
+     "53": "B-OTHER_EVENT",
+     "54": "I-OTHER_EVENT",
+     "55": "I-OUTCOME",
+     "56": "B-OUTCOME",
+     "57": "I-PERSONAL_BACKGROUND",
+     "58": "B-PERSONAL_BACKGROUND",
+     "59": "B-QUALITATIVE_CONCEPT",
+     "60": "I-QUALITATIVE_CONCEPT",
+     "61": "I-QUANTITATIVE_CONCEPT",
+     "62": "B-QUANTITATIVE_CONCEPT",
+     "63": "B-SEVERITY",
+     "64": "I-SEVERITY",
+     "65": "B-SEX",
+     "66": "I-SEX",
+     "67": "B-SHAPE",
+     "68": "I-SHAPE",
+     "69": "B-SIGN_SYMPTOM",
+     "70": "I-SIGN_SYMPTOM",
+     "71": "B-SUBJECT",
+     "72": "I-SUBJECT",
+     "73": "B-TEXTURE",
+     "74": "I-TEXTURE",
+     "75": "B-THERAPEUTIC_PROCEDURE",
+     "76": "I-THERAPEUTIC_PROCEDURE",
+     "77": "I-TIME",
+     "78": "B-TIME",
+     "79": "B-VOLUME",
+     "80": "I-VOLUME",
+     "81": "I-WEIGHT",
+     "82": "B-WEIGHT"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "B-ACTIVITY": 1,
+     "B-ADMINISTRATION": 4,
+     "B-AGE": 5,
+     "B-AREA": 8,
+     "B-BIOLOGICAL_ATTRIBUTE": 9,
+     "B-BIOLOGICAL_STRUCTURE": 12,
+     "B-CLINICAL_EVENT": 13,
+     "B-COLOR": 15,
+     "B-COREFERENCE": 18,
+     "B-DATE": 19,
+     "B-DETAILED_DESCRIPTION": 22,
+     "B-DIAGNOSTIC_PROCEDURE": 24,
+     "B-DISEASE_DISORDER": 26,
+     "B-DISTANCE": 27,
+     "B-DOSAGE": 29,
+     "B-DURATION": 32,
+     "B-FAMILY_HISTORY": 34,
+     "B-FREQUENCY": 35,
+     "B-HEIGHT": 38,
+     "B-HISTORY": 39,
+     "B-LAB_VALUE": 42,
+     "B-MASS": 44,
+     "B-MEDICATION": 46,
+     "B-NONBIOLOGICAL_LOCATION": 48,
+     "B-OCCUPATION": 50,
+     "B-OTHER_ENTITY": 51,
+     "B-OTHER_EVENT": 53,
+     "B-OUTCOME": 56,
+     "B-PERSONAL_BACKGROUND": 58,
+     "B-QUALITATIVE_CONCEPT": 59,
+     "B-QUANTITATIVE_CONCEPT": 62,
+     "B-SEVERITY": 63,
+     "B-SEX": 65,
+     "B-SHAPE": 67,
+     "B-SIGN_SYMPTOM": 69,
+     "B-SUBJECT": 71,
+     "B-TEXTURE": 73,
+     "B-THERAPEUTIC_PROCEDURE": 75,
+     "B-TIME": 78,
+     "B-VOLUME": 79,
+     "B-WEIGHT": 82,
+     "I-ACTIVITY": 2,
+     "I-ADMINISTRATION": 3,
+     "I-AGE": 6,
+     "I-AREA": 7,
+     "I-BIOLOGICAL_ATTRIBUTE": 10,
+     "I-BIOLOGICAL_STRUCTURE": 11,
+     "I-CLINICAL_EVENT": 14,
+     "I-COLOR": 16,
+     "I-COREFERENCE": 17,
+     "I-DATE": 20,
+     "I-DETAILED_DESCRIPTION": 21,
+     "I-DIAGNOSTIC_PROCEDURE": 23,
+     "I-DISEASE_DISORDER": 25,
+     "I-DISTANCE": 28,
+     "I-DOSAGE": 30,
+     "I-DURATION": 31,
+     "I-FAMILY_HISTORY": 33,
+     "I-FREQUENCY": 36,
+     "I-HEIGHT": 37,
+     "I-HISTORY": 40,
+     "I-LAB_VALUE": 41,
+     "I-MASS": 43,
+     "I-MEDICATION": 45,
+     "I-NONBIOLOGICAL_LOCATION": 47,
+     "I-OCCUPATION": 49,
+     "I-OTHER_ENTITY": 52,
+     "I-OTHER_EVENT": 54,
+     "I-OUTCOME": 55,
+     "I-PERSONAL_BACKGROUND": 57,
+     "I-QUALITATIVE_CONCEPT": 60,
+     "I-QUANTITATIVE_CONCEPT": 61,
+     "I-SEVERITY": 64,
+     "I-SEX": 66,
+     "I-SHAPE": 68,
+     "I-SIGN_SYMPTOM": 70,
+     "I-SUBJECT": 72,
+     "I-TEXTURE": 74,
+     "I-THERAPEUTIC_PROCEDURE": 76,
+     "I-TIME": 77,
+     "I-VOLUME": 80,
+     "I-WEIGHT": 81,
+     "O": 0
+   },
+   "layer_norm_eps": 1e-07,
+   "max_position_embeddings": 512,
+   "max_relative_positions": -1,
+   "model_type": "deberta-v2",
+   "norm_rel_ebd": "layer_norm",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 0,
+   "pooler_dropout": 0,
+   "pooler_hidden_act": "gelu",
+   "pooler_hidden_size": 768,
+   "pos_att_type": [
+     "p2c",
+     "c2p"
+   ],
+   "position_biased_input": false,
+   "position_buckets": 256,
+   "relative_attention": true,
+   "share_att_key": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.37.0",
+   "type_vocab_size": 0,
+   "vocab_size": 128100
+ }
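
The `id2label`/`label2id` maps above define an 83-tag BIO scheme (41 clinical entity types plus `O`), so the checkpoint can be used directly with the token-classification pipeline. A minimal inference sketch, assuming the repository id is `blaze999/ner-wand-test` (commit author plus model name); adjust the id or point at a local clone as needed:

```python
from transformers import pipeline

# Assumed repository id; replace with the actual hub id or a local path.
ner = pipeline(
    "token-classification",
    model="blaze999/ner-wand-test",
    aggregation_strategy="simple",  # merge B-/I- pieces into whole entity spans
)

text = "A 63-year-old woman presented with chest pain and shortness of breath."
for entity in ner(text):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))
```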
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:108384c2a391d39b5329f5a4ea0a8f80a8f6fe131cbc768adb8ffbf4ecf79d7f
+ size 735605900
runs/Feb12_07-26-02_db5e1b3cab0a/events.out.tfevents.1707722770.db5e1b3cab0a.35.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4dadfb0fe17bfe4ff7a6c2290e42e7fcd42ec199bf9337a3be53a72ff634e248
+ size 11916
special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token": "[CLS]",
+   "cls_token": "[CLS]",
+   "eos_token": "[SEP]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
spm.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
+ size 2464616
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128000": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "[CLS]",
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_lower_case": false,
+   "eos_token": "[SEP]",
+   "mask_token": "[MASK]",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "sp_model_kwargs": {},
+   "split_by_punct": false,
+   "tokenizer_class": "DebertaV2Tokenizer",
+   "unk_token": "[UNK]",
+   "vocab_type": "spm"
+ }
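
The tokenizer is the stock DeBERTa-v3 SentencePiece tokenizer with `[MASK]` registered as an added token at id 128000. Below is a small sketch of loading it and inspecting the sub-word-to-word alignment that token-classification labels have to follow; the local path is hypothetical, and `word_ids()` assumes the fast tokenizer backed by the `tokenizer.json` added in this commit:

```python
from transformers import AutoTokenizer

# Hypothetical local checkout of this repository; a hub id works the same way.
tokenizer = AutoTokenizer.from_pretrained("./ner-wand-test")

words = ["Chest", "pain", "radiating", "to", "the", "left", "arm"]
encoding = tokenizer(words, is_split_into_words=True)

# word_ids() maps each SentencePiece piece back to its source word, which is
# how BIO labels are usually propagated to sub-tokens before training.
print(encoding.word_ids())
print(tokenizer.convert_ids_to_tokens(encoding["input_ids"]))
```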
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2774c0ecf15f18559a59eac1e027a39c2f20044ce444603797e7892ed08e445
+ size 4728