{ "_name_or_path": "microsoft/deberta-v3-small", "architectures": [ "DebertaV2ForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "finetuning_task": "ner", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "O", "1": "B-ORG", "2": "I-ORG" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-ORG": 1, "I-ORG": 2, "O": 0 }, "layer_norm_eps": 1e-07, "max_position_embeddings": 512, "max_relative_positions": -1, "model_type": "deberta-v2", "norm_rel_ebd": "layer_norm", "num_attention_heads": 12, "num_hidden_layers": 6, "pad_token_id": 0, "pooler_dropout": 0, "pooler_hidden_act": "gelu", "pooler_hidden_size": 768, "pos_att_type": [ "p2c", "c2p" ], "position_biased_input": false, "position_buckets": 256, "relative_attention": true, "share_att_key": true, "torch_dtype": "float32", "transformers_version": "4.34.1", "type_vocab_size": 0, "vocab_size": 128100 }