reformer-enwik8 / config.json
{
  "architectures": [
    "ReformerModelWithLMHead"
  ],
  "attention_head_size": 128,
  "attn_layers": [
    "local",
    "local",
    "lsh",
    "local",
    "local",
    "local",
    "lsh",
    "local",
    "local",
    "local",
    "lsh",
    "local"
  ],
  "axial_norm_std": 1.0,
  "axial_pos_embds": true,
  "axial_pos_embds_dim": [
    256,
    768
  ],
  "axial_pos_shape": [
    128,
    512
  ],
  "chunk_size_feed_forward": 0,
  "chunk_size_lm_head": 0,
  "eos_token_id": 2,
  "feed_forward_size": 4096,
  "hash_seed": null,
  "hidden_act": "relu",
  "hidden_dropout_prob": 0.2,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "is_decoder": true,
  "layer_norm_eps": 1e-12,
  "local_attention_probs_dropout_prob": 0.2,
  "local_attn_chunk_length": 128,
  "local_num_chunks_after": 0,
  "local_num_chunks_before": 1,
  "lsh_attention_probs_dropout_prob": 0.1,
  "lsh_attn_chunk_length": 256,
  "lsh_num_chunks_after": 0,
  "lsh_num_chunks_before": 1,
  "max_position_embeddings": 65536,
  "model_type": "reformer",
  "num_attention_heads": 8,
  "num_buckets": 512,
  "num_hashes": 4,
  "output_past": true,
  "pad_token_id": 0,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 100
    }
  },
  "vocab_size": 258
}
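
Two internal consistency constraints tie the axial position embedding fields together: the entries of axial_pos_shape must multiply to max_position_embeddings (128 x 512 = 65536), and the entries of axial_pos_embds_dim must sum to hidden_size (256 + 768 = 1024). A minimal Python check of both constraints, with the values copied straight from the config above:

# Sanity-check the axial position embedding constraints in this config.
# Values are copied from the JSON above; no external dependencies needed.
axial_pos_shape = (128, 512)        # factorization of the sequence length
axial_pos_embds_dim = (256, 768)    # split of the hidden dimension
max_position_embeddings = 65536
hidden_size = 1024

# The two axial factors must multiply to the full sequence length ...
assert axial_pos_shape[0] * axial_pos_shape[1] == max_position_embeddings
# ... and the two embedding slices must concatenate to the hidden size.
assert sum(axial_pos_embds_dim) == hidden_size
print("axial position embedding constraints hold")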
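
As a usage sketch, the checkpoint carrying this config can be loaded with Hugging Face transformers via ReformerModelWithLMHead (the class named under "architectures"). The repo id google/reformer-enwik8 is an assumption, since the breadcrumb above shows only "reformer-enwik8"; likewise the byte-plus-2 input encoding is an assumption consistent with vocab_size 258 (256 byte values plus the pad and eos ids 0 and 2 declared above), since the repo ships no tokenizer.

import torch
from transformers import ReformerModelWithLMHead

# Assumed repo id; the page header above shows only "reformer-enwik8".
model = ReformerModelWithLMHead.from_pretrained("google/reformer-enwik8")

# Assumed character-level scheme: raw UTF-8 bytes shifted by 2 so that
# ids 0 (pad) and 1 stay reserved, matching vocab_size = 256 + 2.
def encode(text):
    return torch.tensor([[b + 2 for b in text.encode("utf-8")]])

def decode(ids):
    # Shift ids back to raw bytes, skipping the reserved ids below 2.
    return bytes(i - 2 for i in ids if i >= 2).decode("utf-8", errors="replace")

input_ids = encode("The history of natural language processing")  # arbitrary prompt
# do_sample / max_length mirror the task_specific_params block above.
output_ids = model.generate(input_ids, do_sample=True, max_length=100)
print(decode(output_ids[0].tolist()))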