{
  "_name_or_path": "/kaggle/input/phobertv1/kaggle/working/fields_classification/checkpoint-4610",
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "Bảo_hiểm",
    "1": "Bất_động_sản",
    "2": "Bộ_máy hành_chính",
    "3": "Chứng_khoán",
    "4": "Công_nghệ_thông_tin",
    "5": "Doanh_nghiệp",
    "6": "Dịch_vụ pháp_lý",
    "7": "Giao_thông - Vận_tải",
    "8": "Giáo_dục",
    "9": "Kế_toán - Kiểm_toán",
    "10": "Lao_động - Tiền_lương",
    "11": "Lĩnh_vực khác",
    "12": "Quyền dân_sự",
    "13": "Sở_hữu_trí_tuệ",
    "14": "Thuế - Phí - Lệ_Phí",
    "15": "Thương_mại",
    "16": "Thể_thao - Y_tế",
    "17": "Thủ_tục Tố_tụng",
    "18": "Tiền_tệ - Ngân_hàng",
    "19": "Trách_nhiệm hình_sự",
    "20": "Tài_chính nhà_nước",
    "21": "Vi_phạm hành_chính",
    "22": "Văn_hoá - Xã_hội",
    "23": "Xuất_nhập_khẩu",
    "24": "Xây_dựng - Đô_thị",
    "25": "Đầu_tư"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "Bảo_hiểm": 0,
    "Bất_động_sản": 1,
    "Bộ_máy hành_chính": 2,
    "Chứng_khoán": 3,
    "Công_nghệ_thông_tin": 4,
    "Doanh_nghiệp": 5,
    "Dịch_vụ pháp_lý": 6,
    "Giao_thông - Vận_tải": 7,
    "Giáo_dục": 8,
    "Kế_toán - Kiểm_toán": 9,
    "Lao_động - Tiền_lương": 10,
    "Lĩnh_vực khác": 11,
    "Quyền dân_sự": 12,
    "Sở_hữu_trí_tuệ": 13,
    "Thuế - Phí - Lệ_Phí": 14,
    "Thương_mại": 15,
    "Thể_thao - Y_tế": 16,
    "Thủ_tục Tố_tụng": 17,
    "Tiền_tệ - Ngân_hàng": 18,
    "Trách_nhiệm hình_sự": 19,
    "Tài_chính nhà_nước": 20,
    "Vi_phạm hành_chính": 21,
    "Văn_hoá - Xã_hội": 22,
    "Xuất_nhập_khẩu": 23,
    "Xây_dựng - Đô_thị": 24,
    "Đầu_tư": 25
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 258,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "multi_label_classification",
  "tokenizer_class": "PhobertTokenizer",
  "torch_dtype": "float32",
  "transformers_version": "4.33.0",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 64001
}
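This is the Hugging Face config for a PhoBERT-base checkpoint fine-tuned to tag Vietnamese legal texts with one or more of 26 domain labels (e.g. "Bảo_hiểm" / insurance, "Thuế - Phí - Lệ_Phí" / taxes and fees). Below is a minimal usage sketch: it assumes the checkpoint directory recorded in "_name_or_path" is available locally (any path or Hub ID pointing at this checkpoint works), and the sample sentence and the 0.5 decision threshold are illustrative placeholders, not values from the config.

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Path taken from "_name_or_path" above; substitute your own copy of the checkpoint.
checkpoint = "/kaggle/input/phobertv1/kaggle/working/fields_classification/checkpoint-4610"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)  # resolves to PhobertTokenizer per the config
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
model.eval()

# PhoBERT expects word-segmented input (hence the underscores in the label names),
# so real text should be pre-segmented, e.g. with VnCoreNLP. Hypothetical example:
text = "Doanh_nghiệp nộp thuế thu_nhập như thế_nào ?"

# max_position_embeddings is 258, which leaves 256 usable token positions.
inputs = tokenizer(text, truncation=True, max_length=256, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# problem_type is "multi_label_classification": each label gets an independent
# sigmoid score, so every label above the threshold is predicted, not argmax.
probs = torch.sigmoid(logits)[0]
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p > 0.5]
print(predicted)

Because the label maps are stored in the config itself, model.config.id2label returns the human-readable category names directly; no external label file is needed at inference time.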