mamba1 / model.safetensors.index.json
{
"metadata": {
"total_size": 5536451520
},
"weight_map": {
"lm_head.weight": "model-00002-of-00002.safetensors",
"transformer.embeddings.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.conv1d.bias": "model-00002-of-00002.safetensors",
"transformer.h.17.attn.conv1d.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.attn.dt_proj.bias": "model-00002-of-00002.safetensors",
"transformer.h.17.attn.dt_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.attn.in_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.attn.out_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.attn.x_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.mlp.swiglu.w1.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.mlp.swiglu.w2.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.mlp.swiglu.w3.weight": "model-00002-of-00002.safetensors",
"transformer.h.17.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.norm_2.weight": "model-00002-of-00002.safetensors",
"transformer.h.2.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.A_log": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.D": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.conv1d.bias": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.conv1d.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.dt_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.dt_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.in_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.x_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.mlp.swiglu.w1.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.mlp.swiglu.w2.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.mlp.swiglu.w3.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.norm_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.norm_2.weight": "model-00001-of-00002.safetensors",
"transformer.ln_f.weight": "model-00002-of-00002.safetensors"
}
}
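
The "weight_map" above associates each tensor name with the shard file that stores it, and "total_size" gives the combined byte size of all shards. Below is a minimal sketch of how an index like this might be consumed to load a single tensor without reading both shards; the local checkpoint directory name ("mamba1") is an assumption, and the snippet relies on json from the standard library plus safe_open from the safetensors package.

import json
from safetensors import safe_open

# Assumed local directory holding the index file and both shard files.
CKPT_DIR = "mamba1"

# Parse the index: "weight_map" maps tensor names to shard filenames.
with open(f"{CKPT_DIR}/model.safetensors.index.json") as f:
    index = json.load(f)
weight_map = index["weight_map"]

def load_tensor(name):
    # Look up which shard contains `name`, then read only that tensor.
    shard = weight_map[name]
    with safe_open(f"{CKPT_DIR}/{shard}", framework="pt") as sf:
        return sf.get_tensor(name)

# Example: per the index, lm_head.weight lives in model-00002-of-00002.safetensors.
lm_head = load_tensor("lm_head.weight")
print(lm_head.shape)

In practice, transformers.AutoModelForCausalLM.from_pretrained resolves this index automatically when loading a sharded checkpoint; the sketch only illustrates what the mapping encodes.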