{
  "alpha_pattern": {},
  "auto_mapping": {
    "base_model_class": "LlamaForCausalLM",
    "parent_library": "transformers.models.llama.modeling_llama"
  },
  "base_model_name_or_path": "meta-llama/Meta-Llama-3.1-8B",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 8,
  "lora_dropout": 0.0,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "gate_proj",
    "o_proj",
    "down_proj",
    "k_proj",
    "v_proj",
    "q_proj",
    "up_proj"
  ],
  "task_type": null,
  "use_dora": false,
  "use_rslora": false
}
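
This is the adapter_config.json that the PEFT library saves next to LoRA adapter weights. A minimal sketch of how such a config is typically consumed, assuming the file sits in a local adapter directory ("path/to/adapter" below is a hypothetical placeholder, not a path from this config):

# Minimal sketch: attach the LoRA adapter described by this adapter_config.json
# to its base model using the PEFT library.
from transformers import AutoModelForCausalLM
from peft import PeftModel, LoraConfig

# Load the base model named in base_model_name_or_path.
base = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3.1-8B")

# PeftModel.from_pretrained reads adapter_config.json from the directory
# (r=8, lora_alpha=8, the seven target_modules, inference_mode=true, ...)
# and wraps the matching Linear layers with LoRA adapters.
model = PeftModel.from_pretrained(base, "path/to/adapter")

# Equivalent config object, e.g. to train such an adapter from scratch:
config = LoraConfig(
    r=8,
    lora_alpha=8,
    lora_dropout=0.0,
    bias="none",
    target_modules=["gate_proj", "o_proj", "down_proj",
                    "k_proj", "v_proj", "q_proj", "up_proj"],
)

Note that with use_rslora false the LoRA update is scaled by lora_alpha / r, so r = lora_alpha = 8 here gives a scaling factor of 1.0, and target_modules covers all attention and MLP projections of the Llama architecture.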