{ "alpha_pattern": {}, "auto_mapping": null, "base_model_name_or_path": "unsloth/Phi-3-mini-4k-instruct", "bias": "none", "fan_in_fan_out": null, "inference_mode": true, "init_lora_weights": true, "layer_replication": [ [ 0, 8 ], [ 4, 12 ], [ 8, 16 ], [ 12, 20 ], [ 16, 24 ], [ 20, 28 ], [ 24, 32 ] ], "layers_pattern": null, "layers_to_transform": null, "loftq_config": {}, "lora_alpha": 32, "lora_dropout": 0.05, "megatron_config": null, "megatron_core": "megatron.core", "modules_to_save": [ "embed_tokens", "lm_head" ], "peft_type": "LORA", "r": 64, "rank_pattern": {}, "revision": null, "target_modules": [ "up_proj", "gate_proj", "q_proj", "k_proj", "down_proj", "o_proj", "v_proj" ], "task_type": "CAUSAL_LM", "use_dora": false, "use_rslora": false }