{
  "_name_or_path": "microsoft/Phi-3-small-8k-instruct",
  "architectures": [
    "Phi3SmallForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout_prob": 0.0,
  "auto_map": {
    "AutoConfig": "microsoft/Phi-3-small-8k-instruct--configuration_phi3_small.Phi3SmallConfig",
    "AutoModelForCausalLM": "microsoft/Phi-3-small-8k-instruct--modeling_phi3_small.Phi3SmallForCausalLM",
    "AutoTokenizer": "microsoft/Phi-3-small-8k-instruct--tokenization_phi3_small.Phi3SmallTokenizer"
  },
  "blocksparse_block_size": 64,
  "blocksparse_homo_head_pattern": false,
  "blocksparse_num_local_blocks": 16,
  "blocksparse_triton_kernel_block_size": 64,
  "blocksparse_vert_stride": 8,
  "bos_token_id": 100257,
  "dense_attention_every_n_layers": 2,
  "dummy_token_indices": [
    100256,
    100258,
    100259,
    100260,
    100264,
    100265,
    100267,
    100268,
    100269,
    100270,
    100271,
    100272,
    100273,
    100274,
    100275,
    100276,
    100277,
    100278,
    100279,
    100280,
    100281,
    100282,
    100283,
    100284,
    100285,
    100286,
    100287,
    100288,
    100289,
    100290,
    100291,
    100292,
    100293,
    100294,
    100295,
    100296,
    100297,
    100298,
    100299,
    100300,
    100301,
    100302,
    100303,
    100304,
    100305,
    100306,
    100307,
    100308,
    100309,
    100310,
    100311,
    100312,
    100313,
    100314,
    100315,
    100316,
    100317,
    100318,
    100319,
    100320,
    100321,
    100322,
    100323,
    100324,
    100325,
    100326,
    100327,
    100328,
    100329,
    100330,
    100331,
    100332,
    100333,
    100334,
    100335,
    100336,
    100337,
    100338,
    100339,
    100340,
    100341,
    100342,
    100343,
    100344,
    100345,
    100346,
    100347,
    100348,
    100349,
    100350,
    100351
  ],
  "embedding_dropout_prob": 0.1,
  "eos_token_id": 100257,
  "ff_dim_multiplier": null,
  "ff_intermediate_size": 14336,
  "ffn_dropout_prob": 0.1,
  "gegelu_limit": 20.0,
  "gegelu_pad_to_256": true,
  "hidden_act": "gegelu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "max_position_embeddings": 8192,
  "model_type": "phi3small",
  "mup_attn_multiplier": 1.0,
  "mup_embedding_multiplier": 10.0,
  "mup_use_scaling": true,
  "mup_width_multiplier": 8.0,
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "pad_sequence_to_multiple_of_64": true,
  "quantization_config": {
    "batch_size": 1,
    "bits": 4,
    "block_name_to_quantize": null,
    "cache_block_outputs": true,
    "damp_percent": 0.1,
    "dataset": "c4",
    "desc_act": false,
    "exllama_config": {
      "version": 1
    },
    "group_size": 128,
    "max_input_length": null,
    "model_seqlen": null,
    "module_name_preceding_first_block": null,
    "modules_in_block_to_quantize": null,
    "pad_token_id": null,
    "quant_method": "gptq",
    "sym": true,
    "tokenizer": null,
    "true_sequential": true,
    "use_cuda_fp16": false,
    "use_exllama": true
  },
  "reorder_and_upcast_attn": false,
  "rope_embedding_base": 1000000,
  "rope_position_scale": 1.0,
  "rope_scaling": null,
  "torch_dtype": "float16",
  "transformers_version": "4.41.2",
  "use_cache": true,
  "vocab_size": 100352
}
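For reference, a minimal loading sketch in Python. Assumptions, not facts from the config: it uses the base "_name_or_path" as the repo id (the actual GPTQ-quantized checkpoint likely lives under a different repo id), and it presumes transformers at or above the pinned 4.41.2 with a GPTQ backend (auto-gptq/optimum) installed so the embedded "quantization_config" is honored. The prompt and generation settings are illustrative only.

# Usage sketch under the assumptions stated above; not the authoritative
# loading procedure for this checkpoint.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "microsoft/Phi-3-small-8k-instruct"  # from "_name_or_path"; the
# quantized repo id may differ (assumption).

# trust_remote_code=True is required because "auto_map" routes AutoConfig,
# AutoModelForCausalLM, and AutoTokenizer to the custom Phi3Small* classes
# shipped with the repository.
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16"
    device_map="auto",
    trust_remote_code=True,
)

prompt = "Summarize block-sparse attention in one sentence."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))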