{ "architectures": [ "RobertaForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-acl", "1": "B-acl:relcl", "2": "B-advcl", "3": "B-advmod", "4": "B-appos", "5": "B-aux", "6": "B-aux:pass", "7": "B-case", "8": "B-cc", "9": "B-cc:preconj", "10": "B-ccomp", "11": "B-clf", "12": "B-compound", "13": "B-compound:prt", "14": "B-conj", "15": "B-cop", "16": "B-csubj", "17": "B-det", "18": "B-det:predet", "19": "B-discourse", "20": "B-dislocated", "21": "B-fixed", "22": "B-flat:name", "23": "B-goeswith", "24": "B-iobj", "25": "B-mark", "26": "B-nmod", "27": "B-nmod:poss", "28": "B-nsubj", "29": "B-nsubj:pass", "30": "B-nummod", "31": "B-obj", "32": "B-obl", "33": "B-obl:poss", "34": "B-obl:tmod", "35": "B-parataxis", "36": "B-punct", "37": "B-reparandum", "38": "B-root", "39": "B-vocative", "40": "B-xcomp", "41": "I-acl", "42": "I-acl:relcl", "43": "I-advcl", "44": "I-advmod", "45": "I-appos", "46": "I-aux", "47": "I-aux:pass", "48": "I-case", "49": "I-cc", "50": "I-ccomp", "51": "I-clf", "52": "I-compound", "53": "I-conj", "54": "I-cop", "55": "I-csubj", "56": "I-det", "57": "I-det:predet", "58": "I-discourse", "59": "I-dislocated", "60": "I-fixed", "61": "I-flat:name", "62": "I-goeswith", "63": "I-mark", "64": "I-nmod", "65": "I-nmod:poss", "66": "I-nsubj", "67": "I-nsubj:pass", "68": "I-nummod", "69": "I-obj", "70": "I-obl", "71": "I-obl:poss", "72": "I-obl:tmod", "73": "I-parataxis", "74": "I-punct", "75": "I-root", "76": "I-vocative", "77": "I-xcomp" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-acl": 0, "B-acl:relcl": 1, "B-advcl": 2, "B-advmod": 3, "B-appos": 4, "B-aux": 5, "B-aux:pass": 6, "B-case": 7, "B-cc": 8, "B-cc:preconj": 9, "B-ccomp": 10, "B-clf": 11, "B-compound": 12, "B-compound:prt": 13, "B-conj": 14, "B-cop": 15, "B-csubj": 16, "B-det": 17, "B-det:predet": 18, "B-discourse": 19, "B-dislocated": 20, "B-fixed": 21, "B-flat:name": 22, "B-goeswith": 23, "B-iobj": 24, "B-mark": 25, "B-nmod": 26, "B-nmod:poss": 27, "B-nsubj": 28, "B-nsubj:pass": 29, "B-nummod": 30, "B-obj": 31, "B-obl": 32, "B-obl:poss": 33, "B-obl:tmod": 34, "B-parataxis": 35, "B-punct": 36, "B-reparandum": 37, "B-root": 38, "B-vocative": 39, "B-xcomp": 40, "I-acl": 41, "I-acl:relcl": 42, "I-advcl": 43, "I-advmod": 44, "I-appos": 45, "I-aux": 46, "I-aux:pass": 47, "I-case": 48, "I-cc": 49, "I-ccomp": 50, "I-clf": 51, "I-compound": 52, "I-conj": 53, "I-cop": 54, "I-csubj": 55, "I-det": 56, "I-det:predet": 57, "I-discourse": 58, "I-dislocated": 59, "I-fixed": 60, "I-flat:name": 61, "I-goeswith": 62, "I-mark": 63, "I-nmod": 64, "I-nmod:poss": 65, "I-nsubj": 66, "I-nsubj:pass": 67, "I-nummod": 68, "I-obj": 69, "I-obl": 70, "I-obl:poss": 71, "I-obl:tmod": 72, "I-parataxis": 73, "I-punct": 74, "I-root": 75, "I-vocative": 76, "I-xcomp": 77 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 1, "position_embedding_type": "absolute", "tokenizer_class": "RemBertTokenizerFast", "torch_dtype": "float32", "transformers_version": "4.19.4", "type_vocab_size": 2, "use_cache": true, "vocab_size": 3005 }