{
  "_name_or_path": "distilbert-base-uncased",
  "activation": "gelu",
  "architectures": [
    "DistilBertForTokenClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "B-AUTHOR",
    "1": "I-AUTHOR",
    "2": "B-PUBYEAR",
    "3": "I-PUBYEAR",
    "4": "B-TITLE",
    "5": "I-TITLE",
    "6": "B-SOURCE",
    "7": "I-SOURCE",
    "8": "B-SPAGE",
    "9": "I-SPAGE",
    "10": "B-YEAR",
    "11": "I-YEAR",
    "12": "O"
  },
  "initializer_range": 0.02,
  "label2id": {
    "B-AUTHOR": 0,
    "B-PUBYEAR": 2,
    "B-SOURCE": 6,
    "B-SPAGE": 8,
    "B-TITLE": 4,
    "B-YEAR": 10,
    "I-AUTHOR": 1,
    "I-PUBYEAR": 3,
    "I-SOURCE": 7,
    "I-SPAGE": 9,
    "I-TITLE": 5,
    "I-YEAR": 11,
    "O": 12
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.20.1",
  "vocab_size": 30522
}
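
The config above describes a DistilBertForTokenClassification model with 13 BIO labels covering bibliographic fields (author, publication year, title, source, start page, year). A minimal sketch of how such a checkpoint could be loaded and run for reference-string tagging with the transformers library is given below; the checkpoint path and the example reference string are placeholders, not part of this repository.

import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer

# Placeholder path: substitute the actual fine-tuned checkpoint directory or hub repo id.
checkpoint = "path/to/finetuned-distilbert-citation-ner"

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForTokenClassification.from_pretrained(checkpoint)
model.eval()

# Example reference string; its fields (author, year, title, source, pages) match the label set in this config.
text = "Smith, J. (2020). A study of examples. Journal of Samples, 12, 34-56."
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    logits = model(**inputs).logits

pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, pred in zip(tokens, pred_ids):
    # id2label from this config maps each class index to its BIO tag.
    print(token, model.config.id2label[pred])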