NAACL 2022 trained version
{
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "directionality": "bidi",
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "ALT",
    "2": "AND",
    "3": "APP",
    "4": "ART",
    "5": "BUT",
    "6": "COM",
    "7": "CON",
    "8": "COO",
    "9": "DEC",
    "10": "DEF",
    "11": "DIS",
    "12": "DOM",
    "13": "DOW",
    "14": "DST",
    "15": "EFS",
    "16": "EMP",
    "17": "ENG",
    "18": "ENS",
    "19": "ENT",
    "20": "EPG",
    "21": "EPS",
    "22": "EPT",
    "23": "EQA",
    "24": "ETG",
    "25": "ETV",
    "26": "EXC",
    "27": "EXG",
    "28": "EXN",
    "29": "EXS",
    "30": "EXT",
    "31": "EXV",
    "32": "FUT",
    "33": "GPE",
    "34": "HAP",
    "35": "HAS",
    "36": "IMP",
    "37": "INT",
    "38": "IST",
    "39": "ITJ",
    "40": "LES",
    "41": "LOC",
    "42": "MOR",
    "43": "MOY",
    "44": "NAT",
    "45": "NEC",
    "46": "NIL",
    "47": "NOT",
    "48": "NOW",
    "49": "ORG",
    "50": "PER",
    "51": "POS",
    "52": "PRO",
    "53": "PRX",
    "54": "PST",
    "55": "QUA",
    "56": "QUE",
    "57": "REF",
    "58": "REL",
    "59": "RLI",
    "60": "ROL",
    "61": "SCO",
    "62": "SUB",
    "63": "TIM",
    "64": "TOP",
    "65": "UNK",
    "66": "UOM",
    "67": "YOC"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "ALT": 1,
    "AND": 2,
    "APP": 3,
    "ART": 4,
    "BUT": 5,
    "COM": 6,
    "CON": 7,
    "COO": 8,
    "DEC": 9,
    "DEF": 10,
    "DIS": 11,
    "DOM": 12,
    "DOW": 13,
    "DST": 14,
    "EFS": 15,
    "EMP": 16,
    "ENG": 17,
    "ENS": 18,
    "ENT": 19,
    "EPG": 20,
    "EPS": 21,
    "EPT": 22,
    "EQA": 23,
    "ETG": 24,
    "ETV": 25,
    "EXC": 26,
    "EXG": 27,
    "EXN": 28,
    "EXS": 29,
    "EXT": 30,
    "EXV": 31,
    "FUT": 32,
    "GPE": 33,
    "HAP": 34,
    "HAS": 35,
    "IMP": 36,
    "INT": 37,
    "IST": 38,
    "ITJ": 39,
    "LES": 40,
    "LOC": 41,
    "MOR": 42,
    "MOY": 43,
    "NAT": 44,
    "NEC": 45,
    "NIL": 46,
    "NOT": 47,
    "NOW": 48,
    "O": 0,
    "ORG": 49,
    "PER": 50,
    "POS": 51,
    "PRO": 52,
    "PRX": 53,
    "PST": 54,
    "QUA": 55,
    "QUE": 56,
    "REF": 57,
    "REL": 58,
    "RLI": 59,
    "ROL": 60,
    "SCO": 61,
    "SUB": 62,
    "TIM": 63,
    "TOP": 64,
    "UNK": 65,
    "UOM": 66,
    "YOC": 67
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "type_vocab_size": 2,
  "vocab_size": 119547
}
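
This config describes a BERT encoder (12 layers, hidden size 768, 12 attention heads) with a token-classification head over the 68 tags listed in id2label; the vocabulary size of 119,547 matches the multilingual cased BERT vocabulary. As a rough illustration of how such a config is consumed, the sketch below loads the checkpoint with Hugging Face transformers and maps the per-token predictions back to tag names via id2label. MODEL_ID is a placeholder, not a repository id taken from this file.

# Minimal sketch, assuming the checkpoint is hosted on the Hugging Face Hub.
# MODEL_ID is a placeholder -- substitute the actual QCRI repository id.
import torch
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer

MODEL_ID = "QCRI/..."  # placeholder

config = AutoConfig.from_pretrained(MODEL_ID)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForTokenClassification.from_pretrained(MODEL_ID, config=config)
model.eval()

inputs = tokenizer("An example sentence.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, seq_len, 68)

pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
# transformers converts id2label keys to ints on load, so integer indexing works
tags = [config.id2label[i] for i in pred_ids]
print(list(zip(tokens, tags)))

Note that words split into sub-word pieces receive one prediction per piece, and special tokens such as [CLS] and [SEP] also receive a tag; both are typically filtered or aggregated before use.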