{ "adapter_type": "lora", "lora_alpha": 16, "lora_dropout": 0.1, "lora_r": 8, "target_modules": ["q_proj", "v_proj"] }