{
  "_name_or_path": "SebastianSchramm/Cerebras-GPT-111M-instruction",
  "activation_function": "gelu",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.0,
  "bos_token_id": 50256,
  "embd_pdrop": 0.0,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
  "n_embd": 768,
  "n_head": 12,
  "n_inner": 3072,
  "n_layer": 10,
  "n_positions": 2048,
  "quantization_config": {
    "batch_size": 1,
    "bits": 4,
    "block_name_to_quantize": "transformer.h",
    "damp_percent": 0.1,
    "dataset": "c4",
    "desc_act": false,
    "disable_exllama": false,
    "group_size": 128,
    "model_seqlen": 2048,
    "module_name_preceding_first_block": [
      "transformer.wte",
      "transformer.wpe",
      "transformer.drop"
    ],
    "pad_token_id": null,
    "quant_method": "gptq",
    "sym": true,
    "tokenizer": null,
    "true_sequential": true,
    "use_cuda_fp16": true
  },
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.0,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "torch_dtype": "float16",
  "transformers_version": "4.33.0.dev0",
  "use_cache": true,
  "vocab_size": 50258
}