{ "_name_or_path": "microsoft/deberta-v3-base", "architectures": [ "DebertaV2ForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-APL", "1": "B-CMT", "2": "B-DSC", "3": "B-MAT", "4": "B-PRO", "5": "B-SMT", "6": "B-SPL", "7": "I-APL", "8": "I-CMT", "9": "I-DSC", "10": "I-MAT", "11": "I-PRO", "12": "I-SMT", "13": "I-SPL", "14": "O" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-APL": 0, "B-CMT": 1, "B-DSC": 2, "B-MAT": 3, "B-PRO": 4, "B-SMT": 5, "B-SPL": 6, "I-APL": 7, "I-CMT": 8, "I-DSC": 9, "I-MAT": 10, "I-PRO": 11, "I-SMT": 12, "I-SPL": 13, "O": 14 }, "layer_norm_eps": 1e-07, "max_position_embeddings": 512, "max_relative_positions": -1, "model_type": "deberta-v2", "norm_rel_ebd": "layer_norm", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "pooler_dropout": 0, "pooler_hidden_act": "gelu", "pooler_hidden_size": 768, "pos_att_type": [ "p2c", "c2p" ], "position_biased_input": false, "position_buckets": 256, "relative_attention": true, "share_att_key": true, "torch_dtype": "float32", "transformers_version": "4.41.2", "type_vocab_size": 0, "vocab_size": 128100 }