{
"_name_or_path": "distilbert/distilbert-base-uncased",
"activation": "gelu",
"architectures": [
"DistilBertForTokenClassification"
],
"attention_dropout": 0.1,
"dim": 768,
"dropout": 0.1,
"hidden_dim": 3072,
"id2label": {
"0": "B-CASE_NUMBER",
"1": "B-COURT",
"2": "B-DATE",
"3": "B-GPE",
"4": "B-JUDGE",
"5": "B-LAWYER",
"6": "B-ORG",
"7": "B-OTHER_PERSON",
"8": "B-PETITIONER",
"9": "B-PRECEDENT",
"10": "B-PROVISION",
"11": "B-RESPONDENT",
"12": "B-STATUTE",
"13": "B-WITNESS",
"14": "I-CASE_NUMBER",
"15": "I-COURT",
"16": "I-DATE",
"17": "I-GPE",
"18": "I-JUDGE",
"19": "I-LAWYER",
"20": "I-ORG",
"21": "I-OTHER_PERSON",
"22": "I-PETITIONER",
"23": "I-PRECEDENT",
"24": "I-PROVISION",
"25": "I-RESPONDENT",
"26": "I-STATUTE",
"27": "I-WITNESS",
"28": "O"
},
"initializer_range": 0.02,
"label2id": {
"B-CASE_NUMBER": 0,
"B-COURT": 1,
"B-DATE": 2,
"B-GPE": 3,
"B-JUDGE": 4,
"B-LAWYER": 5,
"B-ORG": 6,
"B-OTHER_PERSON": 7,
"B-PETITIONER": 8,
"B-PRECEDENT": 9,
"B-PROVISION": 10,
"B-RESPONDENT": 11,
"B-STATUTE": 12,
"B-WITNESS": 13,
"I-CASE_NUMBER": 14,
"I-COURT": 15,
"I-DATE": 16,
"I-GPE": 17,
"I-JUDGE": 18,
"I-LAWYER": 19,
"I-ORG": 20,
"I-OTHER_PERSON": 21,
"I-PETITIONER": 22,
"I-PRECEDENT": 23,
"I-PROVISION": 24,
"I-RESPONDENT": 25,
"I-STATUTE": 26,
"I-WITNESS": 27,
"O": 28
},
"max_position_embeddings": 512,
"model_type": "distilbert",
"n_heads": 12,
"n_layers": 6,
"pad_token_id": 0,
"qa_dropout": 0.1,
"seq_classif_dropout": 0.2,
"sinusoidal_pos_embds": false,
"tie_weights_": true,
"transformers_version": "4.39.3",
"vocab_size": 30522
}
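
Below is a minimal sketch of how a config like this is typically consumed with the Hugging Face transformers library for token classification (legal NER with BIO tags). The repository id used here is an assumption for illustration only; substitute the actual model repo name.

# Minimal usage sketch, assuming a hypothetical repo id.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

model_id = "Sidziesama/legal-ner-distilbert"  # hypothetical id; replace with the real repository

tokenizer = AutoTokenizer.from_pretrained(model_id)
# The checkpoint carries the 29 labels defined in id2label/label2id above
model = AutoModelForTokenClassification.from_pretrained(model_id)

# aggregation_strategy="simple" merges B-/I- word pieces into whole entity spans
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)
print(ner("The appeal was heard by Justice Sharma in the Supreme Court of India."))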