cindyangelira committed on
Commit
fbe2ae7
1 Parent(s): 67ec522
Files changed (2) hide show
  1. config.json +24 -24
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,6 @@
1
  {
2
- "_name_or_path": "cahya/xlm-roberta-large-indonesian-NER",
 
3
  "architectures": [
4
  "XLMRobertaForTokenClassification"
5
  ],
@@ -7,37 +8,36 @@
7
  "bos_token_id": 0,
8
  "classifier_dropout": null,
9
  "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
  "hidden_act": "gelu",
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 1024,
14
  "id2label": {
15
- "0": "I-DOB",
16
- "1": "B-EMAIL",
17
- "2": "B-LOCATION",
18
- "3": "I-PERSON",
19
- "4": "B-ACCOUNT",
20
- "5": "B-DOB",
21
- "6": "O",
22
- "7": "I-LOCATION",
23
- "8": "B-PHONE",
24
- "9": "B-PERSON",
25
- "10": "B-ID"
26
  },
27
  "initializer_range": 0.02,
28
  "intermediate_size": 4096,
29
  "label2id": {
30
- "B-ACCOUNT": 4,
31
- "B-DOB": 5,
32
- "B-EMAIL": 1,
33
- "B-ID": 10,
34
- "B-LOCATION": 2,
35
- "B-PERSON": 9,
36
- "B-PHONE": 8,
37
- "I-DOB": 0,
38
- "I-LOCATION": 7,
39
- "I-PERSON": 3,
40
- "O": 6
41
  },
42
  "layer_norm_eps": 1e-05,
43
  "max_position_embeddings": 514,
 
1
  {
2
+ "_name_or_path": "FacebookAI/xlm-roberta-large-finetuned-conll03-english",
3
+ "_num_labels": 8,
4
  "architectures": [
5
  "XLMRobertaForTokenClassification"
6
  ],
 
8
  "bos_token_id": 0,
9
  "classifier_dropout": null,
10
  "eos_token_id": 2,
 
11
  "hidden_act": "gelu",
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 1024,
14
  "id2label": {
15
+ "0": "B-PER",
16
+ "1": "O",
17
+ "2": "B-EMAIL",
18
+ "3": "B-LOC",
19
+ "4": "I-LOC",
20
+ "5": "B-DATE_TIME",
21
+ "6": "I-DATE_TIME",
22
+ "7": "B-PHONE",
23
+ "8": "B-GENDER",
24
+ "9": "I-PER",
25
+ "10": "B-SSN"
26
  },
27
  "initializer_range": 0.02,
28
  "intermediate_size": 4096,
29
  "label2id": {
30
+ "B-DATE_TIME": 5,
31
+ "B-EMAIL": 2,
32
+ "B-GENDER": 8,
33
+ "B-LOC": 3,
34
+ "B-PER": 0,
35
+ "B-PHONE": 7,
36
+ "B-SSN": 10,
37
+ "I-DATE_TIME": 6,
38
+ "I-LOC": 4,
39
+ "I-PER": 9,
40
+ "O": 1
41
  },
42
  "layer_norm_eps": 1e-05,
43
  "max_position_embeddings": 514,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b0b7264d9c4901a5c9ced504f592a7e9704119e6671ba693c5a198ff1d39563a
3
  size 2235456956
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0a28272234f831b77990a4935ce0ba9ac0da469c363a3d45944e585ec515731f
3
  size 2235456956