```json
{
  "architectures": [
    "DbrxForCausalLM"
  ],
  "attn_config": {
    "clip_qkv": 8,
    "kv_n_heads": 8,
    "model_type": "",
    "rope_theta": 500000
  },
  "d_model": 6144,
  "emb_pdrop": 0.0,
  "ffn_config": {
    "ffn_hidden_size": 10752,
    "model_type": "",
    "moe_jitter_eps": 0,
    "moe_loss_weight": 0.05,
    "moe_num_experts": 16,
    "moe_top_k": 4,
    "ffn_act_fn": {
      "name": "silu"
    }
  },
  "initializer_range": 0.02,
  "max_seq_len": 32768,
  "model_type": "dbrx",
  "n_heads": 48,
  "n_layers": 40,
  "output_router_logits": false,
  "resid_pdrop": 0.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.40.0",
  "use_cache": true,
  "vocab_size": 100352
}
```
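For reference, a config like this can be loaded and inspected with `transformers` (DBRX support landed in 4.40.0, matching the `transformers_version` field above). A minimal sketch, assuming the JSON above is saved locally as `config.json` (the path is an assumption, not part of the source):

```python
from transformers import DbrxConfig

# Parse the DBRX config shown above; "config.json" is an assumed local path.
config = DbrxConfig.from_pretrained("config.json")

# Core transformer shape.
print(config.d_model, config.n_layers, config.n_heads)  # 6144 40 48

# Grouped-query attention: 48 query heads share 8 key/value heads.
print(config.attn_config.kv_n_heads)  # 8

# Mixture-of-experts routing: 16 experts, 4 active per token.
print(config.ffn_config.moe_num_experts, config.ffn_config.moe_top_k)  # 16 4
```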