{
  "activation": "relu",
  "architectures": [
    "DistilBertForMaskedLM"
  ],
  "attention_dropout": 0.005,
  "dim": 512,
  "dropout": 0.005,
  "embedding_size": 128,
  "hidden_dim": 2048,
  "initializer_range": 0.02,
  "max_position_embeddings": 512,
  "model_type": "hybridbert",
  "n_heads": 8,
  "n_layers": 10,
  "pad_token_id": 0,
  "qa_dropout": 0.005,
  "seq_classif_dropout": 0.005,
  "sinusoidal_pos_embds": true,
  "torch_dtype": "float32",
  "transformers_version": "4.33.2",
  "vocab_size": 30522
}
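
A minimal sketch of inspecting this configuration with Python's standard json module. The local path "config.json" is an assumption (adjust it to wherever the file was downloaded); since "hybridbert" may not be a model type registered with transformers' AutoConfig, parsing the raw JSON is the safest way to read the file:

import json

# Parse the config file; "config.json" is a hypothetical local path.
with open("config.json") as f:
    cfg = json.load(f)

# Report the basic transformer geometry encoded above.
print(cfg["model_type"], "-", cfg["n_layers"], "layers,", cfg["n_heads"], "heads")

# Assuming the usual multi-head attention split, each head covers
# dim / n_heads = 512 / 8 = 64 dimensions.
print("per-head dim:", cfg["dim"] // cfg["n_heads"])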