{
  "_name_or_path": "checkpoints/stabilityai/stablelm-zephyr-3b",
  "anyprec": {
    "arch_config": {
      "layers_name": "layers",
      "model_name": "model",
      "module_names": [
        "self_attn.q_proj",
        "self_attn.k_proj",
        "self_attn.v_proj",
        "self_attn.o_proj",
        "mlp.gate_proj",
        "mlp.up_proj",
        "mlp.down_proj"
      ]
    },
    "group_count": 1,
    "parent_precision": 4,
    "seed_precision": 2,
    "sparse_numvals": {
      "model.layers.0.mlp.down_proj": 361261,
      "model.layers.0.mlp.gate_proj": 409239,
      "model.layers.0.mlp.up_proj": 382725,
      "model.layers.0.self_attn.k_proj": 1249422,
      "model.layers.0.self_attn.o_proj": 493934,
      "model.layers.0.self_attn.q_proj": 1383514,
      "model.layers.0.self_attn.v_proj": 582091,
      "model.layers.1.mlp.down_proj": 371941,
      "model.layers.1.mlp.gate_proj": 312127,
      "model.layers.1.mlp.up_proj": 350541,
      "model.layers.1.self_attn.k_proj": 739147,
      "model.layers.1.self_attn.o_proj": 870440,
      "model.layers.1.self_attn.q_proj": 754729,
      "model.layers.1.self_attn.v_proj": 790019,
      "model.layers.10.mlp.down_proj": 400494,
      "model.layers.10.mlp.gate_proj": 437481,
      "model.layers.10.mlp.up_proj": 389717,
      "model.layers.10.self_attn.k_proj": 217707,
      "model.layers.10.self_attn.o_proj": 159623,
      "model.layers.10.self_attn.q_proj": 204767,
      "model.layers.10.self_attn.v_proj": 168686,
      "model.layers.11.mlp.down_proj": 398251,
      "model.layers.11.mlp.gate_proj": 468437,
      "model.layers.11.mlp.up_proj": 399514,
      "model.layers.11.self_attn.k_proj": 223137,
      "model.layers.11.self_attn.o_proj": 153385,
      "model.layers.11.self_attn.q_proj": 207469,
      "model.layers.11.self_attn.v_proj": 157690,
      "model.layers.12.mlp.down_proj": 392789,
      "model.layers.12.mlp.gate_proj": 511766,
      "model.layers.12.mlp.up_proj": 388221,
      "model.layers.12.self_attn.k_proj": 195398,
      "model.layers.12.self_attn.o_proj": 142264,
      "model.layers.12.self_attn.q_proj": 185287,
      "model.layers.12.self_attn.v_proj": 152926,
      "model.layers.13.mlp.down_proj": 395950,
      "model.layers.13.mlp.gate_proj": 495319,
      "model.layers.13.mlp.up_proj": 404143,
      "model.layers.13.self_attn.k_proj": 218597,
      "model.layers.13.self_attn.o_proj": 159306,
      "model.layers.13.self_attn.q_proj": 206129,
      "model.layers.13.self_attn.v_proj": 168994,
      "model.layers.14.mlp.down_proj": 395967,
      "model.layers.14.mlp.gate_proj": 562906,
      "model.layers.14.mlp.up_proj": 390940,
      "model.layers.14.self_attn.k_proj": 194047,
      "model.layers.14.self_attn.o_proj": 145953,
      "model.layers.14.self_attn.q_proj": 182838,
      "model.layers.14.self_attn.v_proj": 153758,
      "model.layers.15.mlp.down_proj": 386374,
      "model.layers.15.mlp.gate_proj": 522112,
      "model.layers.15.mlp.up_proj": 395074,
      "model.layers.15.self_attn.k_proj": 196997,
      "model.layers.15.self_attn.o_proj": 150452,
      "model.layers.15.self_attn.q_proj": 184345,
      "model.layers.15.self_attn.v_proj": 156440,
      "model.layers.16.mlp.down_proj": 384573,
      "model.layers.16.mlp.gate_proj": 477452,
      "model.layers.16.mlp.up_proj": 386693,
      "model.layers.16.self_attn.k_proj": 215088,
      "model.layers.16.self_attn.o_proj": 145176,
      "model.layers.16.self_attn.q_proj": 193489,
      "model.layers.16.self_attn.v_proj": 157381,
      "model.layers.17.mlp.down_proj": 376292,
      "model.layers.17.mlp.gate_proj": 440742,
      "model.layers.17.mlp.up_proj": 372740,
      "model.layers.17.self_attn.k_proj": 198626,
      "model.layers.17.self_attn.o_proj": 138659,
      "model.layers.17.self_attn.q_proj": 184019,
      "model.layers.17.self_attn.v_proj": 146408,
      "model.layers.18.mlp.down_proj": 372954,
      "model.layers.18.mlp.gate_proj": 407526,
      "model.layers.18.mlp.up_proj": 359072,
      "model.layers.18.self_attn.k_proj": 195759,
      "model.layers.18.self_attn.o_proj": 143198,
      "model.layers.18.self_attn.q_proj": 178705,
      "model.layers.18.self_attn.v_proj": 154099,
      "model.layers.19.mlp.down_proj": 372170,
      "model.layers.19.mlp.gate_proj": 404678,
      "model.layers.19.mlp.up_proj": 362693,
      "model.layers.19.self_attn.k_proj": 181548,
      "model.layers.19.self_attn.o_proj": 141818,
      "model.layers.19.self_attn.q_proj": 167249,
      "model.layers.19.self_attn.v_proj": 144673,
      "model.layers.2.mlp.down_proj": 343350,
      "model.layers.2.mlp.gate_proj": 318733,
      "model.layers.2.mlp.up_proj": 347181,
      "model.layers.2.self_attn.k_proj": 727364,
      "model.layers.2.self_attn.o_proj": 206116,
      "model.layers.2.self_attn.q_proj": 632838,
      "model.layers.2.self_attn.v_proj": 178305,
      "model.layers.20.mlp.down_proj": 358950,
      "model.layers.20.mlp.gate_proj": 407599,
      "model.layers.20.mlp.up_proj": 359506,
      "model.layers.20.self_attn.k_proj": 174602,
      "model.layers.20.self_attn.o_proj": 139503,
      "model.layers.20.self_attn.q_proj": 169296,
      "model.layers.20.self_attn.v_proj": 136204,
      "model.layers.21.mlp.down_proj": 357960,
      "model.layers.21.mlp.gate_proj": 385992,
      "model.layers.21.mlp.up_proj": 348288,
      "model.layers.21.self_attn.k_proj": 185211,
      "model.layers.21.self_attn.o_proj": 142190,
      "model.layers.21.self_attn.q_proj": 176282,
      "model.layers.21.self_attn.v_proj": 142371,
      "model.layers.22.mlp.down_proj": 361293,
      "model.layers.22.mlp.gate_proj": 377918,
      "model.layers.22.mlp.up_proj": 353747,
      "model.layers.22.self_attn.k_proj": 179724,
      "model.layers.22.self_attn.o_proj": 143106,
      "model.layers.22.self_attn.q_proj": 167648,
      "model.layers.22.self_attn.v_proj": 145138,
      "model.layers.23.mlp.down_proj": 358986,
      "model.layers.23.mlp.gate_proj": 378110,
      "model.layers.23.mlp.up_proj": 356957,
      "model.layers.23.self_attn.k_proj": 178196,
      "model.layers.23.self_attn.o_proj": 142376,
      "model.layers.23.self_attn.q_proj": 168788,
      "model.layers.23.self_attn.v_proj": 143664,
      "model.layers.24.mlp.down_proj": 354332,
      "model.layers.24.mlp.gate_proj": 366565,
      "model.layers.24.mlp.up_proj": 346526,
      "model.layers.24.self_attn.k_proj": 185428,
      "model.layers.24.self_attn.o_proj": 140813,
      "model.layers.24.self_attn.q_proj": 176689,
      "model.layers.24.self_attn.v_proj": 139483,
      "model.layers.25.mlp.down_proj": 353602,
      "model.layers.25.mlp.gate_proj": 343542,
      "model.layers.25.mlp.up_proj": 345700,
      "model.layers.25.self_attn.k_proj": 178173,
      "model.layers.25.self_attn.o_proj": 146206,
      "model.layers.25.self_attn.q_proj": 166860,
      "model.layers.25.self_attn.v_proj": 144246,
      "model.layers.26.mlp.down_proj": 356005,
      "model.layers.26.mlp.gate_proj": 339690,
      "model.layers.26.mlp.up_proj": 347487,
      "model.layers.26.self_attn.k_proj": 196789,
      "model.layers.26.self_attn.o_proj": 140962,
      "model.layers.26.self_attn.q_proj": 179600,
      "model.layers.26.self_attn.v_proj": 145076,
      "model.layers.27.mlp.down_proj": 354147,
      "model.layers.27.mlp.gate_proj": 333874,
      "model.layers.27.mlp.up_proj": 342862,
      "model.layers.27.self_attn.k_proj": 191073,
      "model.layers.27.self_attn.o_proj": 138949,
      "model.layers.27.self_attn.q_proj": 174841,
      "model.layers.27.self_attn.v_proj": 139074,
      "model.layers.28.mlp.down_proj": 350013,
      "model.layers.28.mlp.gate_proj": 343786,
      "model.layers.28.mlp.up_proj": 341398,
      "model.layers.28.self_attn.k_proj": 193596,
      "model.layers.28.self_attn.o_proj": 141809,
      "model.layers.28.self_attn.q_proj": 174614,
      "model.layers.28.self_attn.v_proj": 143142,
      "model.layers.29.mlp.down_proj": 356645,
      "model.layers.29.mlp.gate_proj": 382614,
      "model.layers.29.mlp.up_proj": 351816,
      "model.layers.29.self_attn.k_proj": 192295,
      "model.layers.29.self_attn.o_proj": 147619,
      "model.layers.29.self_attn.q_proj": 174798,
      "model.layers.29.self_attn.v_proj": 148455,
      "model.layers.3.mlp.down_proj": 353085,
      "model.layers.3.mlp.gate_proj": 310395,
      "model.layers.3.mlp.up_proj": 352469,
      "model.layers.3.self_attn.k_proj": 329393,
      "model.layers.3.self_attn.o_proj": 179470,
      "model.layers.3.self_attn.q_proj": 307652,
      "model.layers.3.self_attn.v_proj": 163581,
      "model.layers.30.mlp.down_proj": 380293,
      "model.layers.30.mlp.gate_proj": 435280,
      "model.layers.30.mlp.up_proj": 352518,
      "model.layers.30.self_attn.k_proj": 178469,
      "model.layers.30.self_attn.o_proj": 156350,
      "model.layers.30.self_attn.q_proj": 165420,
      "model.layers.30.self_attn.v_proj": 157109,
      "model.layers.31.mlp.down_proj": 407998,
      "model.layers.31.mlp.gate_proj": 478129,
      "model.layers.31.mlp.up_proj": 360125,
      "model.layers.31.self_attn.k_proj": 162466,
      "model.layers.31.self_attn.o_proj": 180364,
      "model.layers.31.self_attn.q_proj": 160350,
      "model.layers.31.self_attn.v_proj": 166443,
      "model.layers.4.mlp.down_proj": 351602,
      "model.layers.4.mlp.gate_proj": 303771,
      "model.layers.4.mlp.up_proj": 351399,
      "model.layers.4.self_attn.k_proj": 269033,
      "model.layers.4.self_attn.o_proj": 159082,
      "model.layers.4.self_attn.q_proj": 257637,
      "model.layers.4.self_attn.v_proj": 156983,
      "model.layers.5.mlp.down_proj": 350684,
      "model.layers.5.mlp.gate_proj": 325463,
      "model.layers.5.mlp.up_proj": 347872,
      "model.layers.5.self_attn.k_proj": 237619,
      "model.layers.5.self_attn.o_proj": 179358,
      "model.layers.5.self_attn.q_proj": 227821,
      "model.layers.5.self_attn.v_proj": 191027,
      "model.layers.6.mlp.down_proj": 364690,
      "model.layers.6.mlp.gate_proj": 341908,
      "model.layers.6.mlp.up_proj": 356264,
      "model.layers.6.self_attn.k_proj": 200776,
      "model.layers.6.self_attn.o_proj": 144559,
      "model.layers.6.self_attn.q_proj": 189252,
      "model.layers.6.self_attn.v_proj": 163452,
      "model.layers.7.mlp.down_proj": 367623,
      "model.layers.7.mlp.gate_proj": 355908,
      "model.layers.7.mlp.up_proj": 361007,
      "model.layers.7.self_attn.k_proj": 192816,
      "model.layers.7.self_attn.o_proj": 141675,
      "model.layers.7.self_attn.q_proj": 182592,
      "model.layers.7.self_attn.v_proj": 147459,
      "model.layers.8.mlp.down_proj": 381346,
      "model.layers.8.mlp.gate_proj": 392814,
      "model.layers.8.mlp.up_proj": 385174,
      "model.layers.8.self_attn.k_proj": 214820,
      "model.layers.8.self_attn.o_proj": 148998,
      "model.layers.8.self_attn.q_proj": 205237,
      "model.layers.8.self_attn.v_proj": 154614,
      "model.layers.9.mlp.down_proj": 384446,
      "model.layers.9.mlp.gate_proj": 408833,
      "model.layers.9.mlp.up_proj": 386970,
      "model.layers.9.self_attn.k_proj": 203316,
      "model.layers.9.self_attn.o_proj": 150194,
      "model.layers.9.self_attn.q_proj": 197655,
      "model.layers.9.self_attn.v_proj": 157709
    }
  },
  "architectures": [
    "StableLmForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "eos_token_id": 0,
  "hidden_act": "silu",
  "hidden_dropout": 0.0,
  "hidden_size": 2560,
  "initializer_range": 0.02,
  "intermediate_size": 6912,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 4096,
  "model_type": "stablelm",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "partial_rotary_factor": 0.25,
  "rope_scaling": null,
  "rope_theta": 10000,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.39.3",
  "use_cache": true,
  "use_qkv_bias": false,
  "vocab_size": 50304
}