asahi417 committed
Commit
8280c13
1 Parent(s): eae540a
Files changed (2)
  1. config.json +327 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,327 @@
+ {
+   "_name_or_path": "tner_ckpt/tweebank_ner_deberta_v3_large/best_model",
+   "architectures": [
+     "DebertaV2ForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "crf_state_dict": {
+     "_constraint_mask": [
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         0.0
+       ],
+       [
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0
+       ]
+     ],
+     "end_transitions": [
+       0.9945968389511108,
+       -0.7898595333099365,
+       -1.4528149366378784,
+       -0.39835214614868164,
+       -0.19360840320587158,
+       -0.30653536319732666,
+       1.1348109245300293,
+       -0.5007272362709045,
+       0.2630143165588379
+     ],
+     "start_transitions": [
+       0.163371279835701,
+       1.0295491218566895,
+       1.9030133485794067,
+       -0.93556809425354,
+       -0.142340287566185,
+       0.7478364109992981,
+       -1.244533896446228,
+       0.7007433176040649,
+       1.0970624685287476
+     ],
+     "transitions": [
+       [
+         0.5219072699546814,
+         -0.6073024868965149,
+         0.21660777926445007,
+         -0.08061438798904419,
+         0.34963756799697876,
+         -0.09881197661161423,
+         -0.12142079323530197,
+         -0.12750038504600525,
+         0.19532112777233124
+       ],
+       [
+         -0.18758216500282288,
+         0.19614772498607635,
+         -0.21055644750595093,
+         -0.40785694122314453,
+         0.1777714490890503,
+         0.12013483047485352,
+         0.3959467113018036,
+         0.15063655376434326,
+         -0.29076510667800903
+       ],
+       [
+         0.5534647703170776,
+         0.012826390564441681,
+         -0.16149963438510895,
+         -0.5842018127441406,
+         -0.0018042281735688448,
+         -0.43679139018058777,
+         -0.23927611112594604,
+         -0.10121553391218185,
+         -0.0426090732216835
+       ],
+       [
+         0.25016123056411743,
+         0.04673261195421219,
+         -0.41687679290771484,
+         0.22481179237365723,
+         0.10355432331562042,
+         0.20035138726234436,
+         -0.3028927445411682,
+         -0.007212463766336441,
+         0.31153398752212524
+       ],
+       [
+         0.5856021642684937,
+         -0.10948202013969421,
+         0.1351092904806137,
+         0.4515761733055115,
+         0.26148682832717896,
+         -0.11256065219640732,
+         -0.15908482670783997,
+         -0.4827544391155243,
+         -0.4094712436199188
+       ],
+       [
+         0.10443474352359772,
+         -0.39016222953796387,
+         -0.06021846458315849,
+         -0.11585196107625961,
+         -0.11244403570890427,
+         -0.6651127934455872,
+         -0.07179304957389832,
+         0.05838318169116974,
+         -0.007226672023534775
+       ],
+       [
+         -0.30400359630584717,
+         0.49990174174308777,
+         -0.3571702837944031,
+         -0.5655873417854309,
+         -0.5289591550827026,
+         -0.26642906665802,
+         0.28862693905830383,
+         -0.23844502866268158,
+         -0.19457215070724487
+       ],
+       [
+         -0.37449777126312256,
+         -0.09788568317890167,
+         -0.47199949622154236,
+         -0.2298676073551178,
+         0.0554109625518322,
+         -0.08147075772285461,
+         -0.3137938976287842,
+         0.3010720908641815,
+         0.3137664198875427
+       ],
+       [
+         -0.09058953821659088,
+         0.2583652436733246,
+         0.367694228887558,
+         0.45001059770584106,
+         -0.34188705682754517,
+         -0.0004857222083956003,
+         0.14611537754535675,
+         -0.3392927646636963,
+         0.4324687719345093
+       ]
+     ]
+   },
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "B-LOC",
+     "1": "B-MISC",
+     "2": "B-ORG",
+     "3": "B-PER",
+     "4": "I-LOC",
+     "5": "I-MISC",
+     "6": "I-ORG",
+     "7": "I-PER",
+     "8": "O"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "B-LOC": 0,
+     "B-MISC": 1,
+     "B-ORG": 2,
+     "B-PER": 3,
+     "I-LOC": 4,
+     "I-MISC": 5,
+     "I-ORG": 6,
+     "I-PER": 7,
+     "O": 8
+   },
+   "layer_norm_eps": 1e-07,
+   "max_position_embeddings": 512,
+   "max_relative_positions": -1,
+   "model_type": "deberta-v2",
+   "norm_rel_ebd": "layer_norm",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 0,
+   "pooler_dropout": 0,
+   "pooler_hidden_act": "gelu",
+   "pooler_hidden_size": 1024,
+   "pos_att_type": [
+     "p2c",
+     "c2p"
+   ],
+   "position_biased_input": false,
+   "position_buckets": 256,
+   "relative_attention": true,
+   "share_att_key": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.20.1",
+   "type_vocab_size": 0,
+   "vocab_size": 128100
+ }
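
The config.json added above describes a `deberta-v2` encoder (24 layers, hidden size 1024) with a nine-label BIO tagging head (`id2label`) and a CRF state dict stored alongside the usual transformer settings. Below is a minimal sketch of loading such a checkpoint with plain `transformers`; the repo id is a placeholder (an assumption, not taken from this commit), and the `crf_state_dict` entry appears to be intended for the `tner` library, so this sketch ignores it and decodes with a per-token argmax instead of Viterbi.

```python
# Sketch only: the repo id below is a placeholder/assumption, not taken from this commit.
import torch
from transformers import AutoConfig, AutoTokenizer, AutoModelForTokenClassification

repo_id = "tner/deberta-v3-large-tweebank-ner"  # hypothetical repo id

config = AutoConfig.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForTokenClassification.from_pretrained(repo_id)
model.eval()

text = "Tweebank NER is annotated on English tweets."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits            # shape: (1, seq_len, num_labels)

pred_ids = logits.argmax(dim=-1)[0].tolist()   # greedy decoding; no CRF applied here
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
print([(tok, config.id2label[i]) for tok, i in zip(tokens, pred_ids)])
```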
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e6734e5a6341e0f6ee6e22d35485a57b83d4461a5bb4bb7f78bd5d8f85bedc9
+ size 1736217519
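
The three lines above are a Git LFS pointer rather than the weights themselves; the actual pytorch_model.bin is fetched on clone or download and should match the recorded sha256 and size. A minimal sketch (not part of the commit) for checking a downloaded file against that pointer:

```python
# Sketch: verify a locally downloaded pytorch_model.bin against the LFS pointer above.
import hashlib

expected_oid = "8e6734e5a6341e0f6ee6e22d35485a57b83d4461a5bb4bb7f78bd5d8f85bedc9"
expected_size = 1736217519

digest = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)
        size += len(chunk)

assert size == expected_size, f"unexpected size: {size}"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```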