0918_ma_lora_OLM_2 / adapter_config.json
{
  "adapter_emb_size": 512,
  "adaptive_ratio": 0.01,
  "adaptive_ratio_decay": 1.0,
  "allow_empty_lora": false,
  "auto_mapping": null,
  "base_model_name_or_path": "allenai/OLMo-1B",
  "bias": "none",
  "dynamic_adapter_pool": true,
  "enable_lora": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "input_based_adapter_selection": false,
  "insert_zero_lora": false,
  "layer_to_lora": [],
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "merge_weights": false,
  "modules_to_save": null,
  "number_of_adapter_pre_layer": 8,
  "ot_diversified_dispatcher": false,
  "peft_type": "MA_LORA",
  "pool_selective_inference": true,
  "pool_selective_training": true,
  "r": 16,
  "random_routing": false,
  "random_routing_inference": false,
  "selective_num": 8,
  "simple_hidden_matching": true,
  "simple_instance_matching": true,
  "target_modules": [
    "att_proj"
  ],
  "task_type": "CAUSAL_LM"
}
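
For reference, a minimal sketch of inspecting this configuration with Python's standard library. The file path is hypothetical, and because "MA_LORA" is not a peft_type defined in stock Hugging Face peft, actually attaching the adapter to allenai/OLMo-1B would require the custom MA-LoRA codebase this adapter was trained with; the sketch only reads the recorded hyperparameters.

import json

# Minimal sketch: read adapter_config.json with the standard library only.
# The path is hypothetical; point it at the downloaded file.
with open("adapter_config.json") as f:
    cfg = json.load(f)

# Core hyperparameters recorded in this config.
print(cfg["base_model_name_or_path"])            # "allenai/OLMo-1B"
print(cfg["peft_type"])                          # "MA_LORA" (custom, not in stock peft)
print(cfg["r"], cfg["lora_alpha"], cfg["lora_dropout"])  # 16 16 0.05
print(cfg["target_modules"])                     # ["att_proj"]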