{
  "auto_mapping": {
    "base_model_class": "Gemma2ForCausalLM",
    "parent_library": "transformers.models.gemma2.modeling_gemma2"
  },
  "base_model_name_or_path": "/home/lab/halyn/gemma/seok/MA-LMM/llm/gemma-2-2b-it",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 32,
  "lora_dropout": 0.1,
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 16,
  "revision": null,
  "target_modules": [
    "q_proj",
    "o_proj",
    "k_proj",
    "v_proj",
    "gate_proj",
    "up_proj",
    "down_proj"
  ],
  "task_type": "TEXT_GENERATION"
}
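For reference, the sketch below shows roughly how a PEFT LoraConfig matching the values above (r=16, lora_alpha=32, lora_dropout=0.1, bias="none", all seven attention and MLP projection modules) could be constructed and applied to the base model. This is a minimal illustration, not the exact training script: the Hub id "google/gemma-2-2b-it" stands in for the local checkpoint path recorded in base_model_name_or_path, and the adapter directory name "adapter_dir" is hypothetical. Note that "TEXT_GENERATION" is not one of PEFT's built-in TaskType values, which is presumably why the auto_mapping block was recorded; the sketch uses the standard "CAUSAL_LM" task type instead.

from peft import LoraConfig, PeftModel, get_peft_model
from transformers import AutoModelForCausalLM

# Stand-in for the local path in base_model_name_or_path.
base_model = AutoModelForCausalLM.from_pretrained("google/gemma-2-2b-it")

# LoRA hyperparameters mirroring the adapter_config.json above.
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.1,
    bias="none",
    target_modules=[
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
    task_type="CAUSAL_LM",  # the saved config records "TEXT_GENERATION" instead
)

# Wrap the base model with the LoRA adapter for training.
model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()

# To reload a saved adapter (inference_mode: true) on top of the base model:
# model = PeftModel.from_pretrained(base_model, "adapter_dir")  # hypothetical path

Targeting all of q/k/v/o plus the gate/up/down MLP projections applies LoRA to every linear layer in each Gemma 2 decoder block, which is a common choice when adapter capacity matters more than parameter count.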