{
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B_BRN",
    "2": "B_DES",
    "3": "B_DTM",
    "4": "B_LOC",
    "5": "B_MEA",
    "6": "B_NUM",
    "7": "B_ORG",
    "8": "B_PER",
    "9": "B_TRM",
    "10": "B_TTL",
    "11": "I_BRN",
    "12": "I_DES",
    "13": "I_DTM",
    "14": "I_LOC",
    "15": "I_MEA",
    "16": "I_NUM",
    "17": "I_ORG",
    "18": "I_PER",
    "19": "I_TRM",
    "20": "I_TTL",
    "21": "E_BRN",
    "22": "E_DES",
    "23": "E_DTM",
    "24": "E_LOC",
    "25": "E_MEA",
    "26": "E_NUM",
    "27": "E_ORG",
    "28": "E_PER",
    "29": "E_TRM",
    "30": "E_TTL"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "O": 0,
    "B_BRN": 1,
    "B_DES": 2,
    "B_DTM": 3,
    "B_LOC": 4,
    "B_MEA": 5,
    "B_NUM": 6,
    "B_ORG": 7,
    "B_PER": 8,
    "B_TRM": 9,
    "B_TTL": 10,
    "I_BRN": 11,
    "I_DES": 12,
    "I_DTM": 13,
    "I_LOC": 14,
    "I_MEA": 15,
    "I_NUM": 16,
    "I_ORG": 17,
    "I_PER": 18,
    "I_TRM": 19,
    "I_TTL": 20,
    "E_BRN": 21,
    "E_DES": 22,
    "E_DTM": 23,
    "E_LOC": 24,
    "E_MEA": 25,
    "E_NUM": 26,
    "E_ORG": 27,
    "E_PER": 28,
    "E_TRM": 29,
    "E_TTL": 30
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "camembert",
"num_attention_head": 12,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"type_vocab_size": 1,
"vocab_size": 25005
}
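A minimal sketch of inspecting this config with the Hugging Face transformers library, assuming the file above is saved locally as ./config.json (no model weights are required; the printed values follow directly from the fields in the file):

# Sketch: load the config from the current directory and inspect the label maps.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")  # reads ./config.json

print(config.model_type)         # camembert
print(config.num_labels)         # 31 (derived from id2label)
print(config.id2label[8])        # B_PER (id2label keys are converted to ints on load)
print(config.label2id["B_PER"])  # 8

Because model_type is "camembert", AutoConfig resolves this file to a CamembertConfig; the 31 entries in id2label/label2id give a token-classification head with 31 output classes over a B/I/E tagging scheme.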