{
  "_name_or_path": "microsoft/deberta-v3-base",
  "architectures": [
    "DebertaV2ForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "CC",
    "1": "CN 1",
    "2": "CN 2",
    "3": "CN 3",
    "4": "Condition",
    "5": "Cooling off",
    "6": "DS 1",
    "7": "Early",
    "8": "Extra Requisition",
    "9": "GST",
    "10": "HBCF",
    "11": "Incap 1",
    "12": "Incap 2",
    "13": "Incap 3",
    "14": "Interest on Deposit",
    "15": "L Back",
    "16": "MV",
    "17": "OTP Adjustment",
    "18": "Probate",
    "19": "RC",
    "20": "Release",
    "21": "Req",
    "22": "TA",
    "23": "TF Form",
    "24": "amd 1",
    "25": "amd 10",
    "26": "amd 2",
    "27": "amd 3",
    "28": "amd 4",
    "29": "amd 5",
    "30": "amd 6",
    "31": "amd 7",
    "32": "amd 8",
    "33": "amd 9",
    "34": "as",
    "35": "lc 1",
    "36": "lc 2",
    "37": "lc 3",
    "38": "lc 4",
    "39": "lc 5"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "CC": 0,
    "CN 1": 1,
    "CN 2": 2,
    "CN 3": 3,
    "Condition": 4,
    "Cooling off": 5,
    "DS 1": 6,
    "Early": 7,
    "Extra Requisition": 8,
    "GST": 9,
    "HBCF": 10,
    "Incap 1": 11,
    "Incap 2": 12,
    "Incap 3": 13,
    "Interest on Deposit": 14,
    "L Back": 15,
    "MV": 16,
    "OTP Adjustment": 17,
    "Probate": 18,
    "RC": 19,
    "Release": 20,
    "Req": 21,
    "TA": 22,
    "TF Form": 23,
    "amd 1": 24,
    "amd 10": 25,
    "amd 2": 26,
    "amd 3": 27,
    "amd 4": 28,
    "amd 5": 29,
    "amd 6": 30,
    "amd 7": 31,
    "amd 8": 32,
    "amd 9": 33,
    "as": 34,
    "lc 1": 35,
    "lc 2": 36,
    "lc 3": 37,
    "lc 4": 38,
    "lc 5": 39
  },
  "layer_norm_eps": 1e-07,
  "max_position_embeddings": 512,
  "max_relative_positions": -1,
  "model_type": "deberta-v2",
  "norm_rel_ebd": "layer_norm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "pooler_dropout": 0,
  "pooler_hidden_act": "gelu",
  "pooler_hidden_size": 768,
  "pos_att_type": [
    "p2c",
    "c2p"
  ],
  "position_biased_input": false,
  "position_buckets": 256,
  "problem_type": "multi_label_classification",
  "relative_attention": true,
  "share_att_key": true,
  "torch_dtype": "float32",
  "transformers_version": "4.39.2",
  "type_vocab_size": 0,
  "vocab_size": 128570
}
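
This config describes a DeBERTa-v3-base model fine-tuned for multi-label sequence classification over 40 labels. Below is a minimal usage sketch with the Hugging Face `transformers` library, assuming the fine-tuned checkpoint that ships with this config is available locally or on the Hub; the `checkpoint` path and the input text are placeholders, not taken from this repo.

```python
# Minimal sketch: load a checkpoint carrying this config and run
# multi-label inference. `checkpoint` is a placeholder path.
import torch
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

checkpoint = "path/to/fine-tuned-checkpoint"  # placeholder: local dir or Hub repo id

config = AutoConfig.from_pretrained(checkpoint)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
model.eval()

text = "Example clause text"  # placeholder input
# max_position_embeddings is 512, so truncate inputs to that length.
inputs = tokenizer(text, truncation=True, max_length=512, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 40)

# problem_type is "multi_label_classification": each label gets an
# independent sigmoid score instead of a softmax over all 40 labels.
probs = torch.sigmoid(logits)[0]
predicted = [config.id2label[i] for i, p in enumerate(probs) if p > 0.5]
print(predicted)
```

Because `problem_type` is `multi_label_classification`, a single input can carry several labels at once, so predictions come from thresholding per-label sigmoid scores rather than taking an argmax; the 0.5 threshold above is an illustrative default, not a value specified by this config.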