{
  "_name_or_path": "/home/project/11003280/sft_rlhf/models/7B_model/7B_SafeTensors",
  "architectures": [
    "MPTForCausalLM"
  ],
  "attn_config": {
    "alibi": false,
    "alibi_bias_max": 8,
    "attn_impl": "torch",
    "attn_pdrop": 0.0,
    "attn_type": "multihead_attention",
    "attn_uses_sequence_id": false,
    "clip_qkv": null,
    "prefix_lm": false,
    "qk_ln": true,
    "softmax_scale": null
  },
  "auto_map": {
    "AutoConfig": "configuration_mpt.MPTConfig",
    "AutoModelForCausalLM": "modeling_mpt.MPTForCausalLM"
  },
  "d_model": 4096,
  "emb_pdrop": 0.0,
  "embedding_fraction": 0.1,
  "expansion_ratio": 4,
  "fc_type": "torch",
  "ffn_config": {
    "fc_type": "torch",
    "ffn_type": "mptmlp"
  },
  "init_config": {
    "emb_init_std": null,
    "emb_init_uniform_lim": null,
    "fan_mode": "fan_in",
    "init_div_is_residual": true,
    "init_gain": 0.0,
    "init_nonlinearity": "relu",
    "init_std": null,
    "name": "kaiming_normal_",
    "verbose": 0
  },
  "init_config_defaults": {
    "init_std": 0.02
  },
  "init_device": "cpu",
  "learned_pos_emb": true,
  "logit_scale": "inv_sqrt_d_model",
  "max_seq_len": 2048,
  "model_type": "mpt",
  "n_heads": 32,
  "n_layers": 32,
  "no_bias": false,
  "norm_type": "low_precision_layernorm",
  "resid_pdrop": 0.0,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.34.0",
  "use_cache": false,
  "vocab_size": 256000
}
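
For reference, a minimal sketch of loading a checkpoint described by this config with Hugging Face transformers. It assumes the custom configuration_mpt.py and modeling_mpt.py files named in "auto_map" ship alongside config.json in the checkpoint directory; because "auto_map" routes AutoConfig and AutoModelForCausalLM to those custom MPT classes, loading requires trust_remote_code=True. The path is taken from "_name_or_path" above.

# Sketch: load the MPT checkpoint this config describes.
# Assumes configuration_mpt.py / modeling_mpt.py (from "auto_map")
# are present in the checkpoint directory.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

ckpt = "/home/project/11003280/sft_rlhf/models/7B_model/7B_SafeTensors"

# trust_remote_code=True is required: "auto_map" points the Auto classes
# at custom code shipped with the checkpoint, not built-in transformers classes.
config = AutoConfig.from_pretrained(ckpt, trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained(
    ckpt,
    config=config,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in the config
    trust_remote_code=True,
)

Note that "use_cache" is false here, which disables KV caching and slows autoregressive generation; it can be overridden at generation time (e.g. model.generate(..., use_cache=True)) without editing the config.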