morgan committed
Commit
bd8e5b7
1 Parent(s): 135a944

Training in progress, step 903

adapter_config.json CHANGED
@@ -8,21 +8,21 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 32,
+  "lora_alpha": 16,
   "lora_dropout": 0.05,
   "modules_to_save": [
     "lm_head",
     "embed_tokens"
   ],
   "peft_type": "LORA",
-  "r": 16,
+  "r": 32,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "q_proj",
     "o_proj",
-    "v_proj"
+    "k_proj",
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
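This commit halves lora_alpha (32 → 16) and doubles the LoRA rank r (16 → 32); target_modules is reordered but still covers the same four attention projections. A minimal sketch of a peft LoraConfig that would serialize to the updated adapter_config.json — the surrounding training setup is an assumption, not taken from this repo:

```python
# Sketch only: reproduces the updated adapter_config.json via peft.
# Everything except the values visible in the diff is an assumption.
from peft import LoraConfig

lora_config = LoraConfig(
    r=32,                      # was 16 before this commit
    lora_alpha=16,             # was 32 before this commit
    lora_dropout=0.05,
    target_modules=["o_proj", "k_proj", "v_proj", "q_proj"],
    modules_to_save=["lm_head", "embed_tokens"],  # stored in full, not as LoRA
    task_type="CAUSAL_LM",
)
```

Note that the effective LoRA scaling lora_alpha / r drops from 32/16 = 2.0 to 16/32 = 0.5, so the adapter's contribution is scaled down even as its capacity grows.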
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2190821174b6d4e9ddb7e164365e5fe12cab120856f8bab3d17fe3acff9c7119
-size 578881368
+oid sha256:db7c657f23896f4007d679c837826db9d7deff124f24684d540d45c8b3620016
+size 633407416
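The adapter file grows by roughly 54.5 MB, which is consistent with the rank doubling: LoRA parameter count scales linearly with r, while the full copies of lm_head and embed_tokens (modules_to_save) are unaffected by it. A rough, hedged check, assuming 4-byte (fp32) weight storage:

```python
# Back-of-the-envelope check; fp32 storage (4 bytes/param) is an assumption.
old_size, new_size = 578_881_368, 633_407_416   # bytes, from the LFS pointers
delta_params = (new_size - old_size) / 4        # ~13.6M params

# LoRA adds r * (d_in + d_out) params per target matrix, so going from
# r=16 to r=32 adds one extra copy of the r=16 LoRA weights.
print(f"extra params: {delta_params:,.0f}  (~= LoRA params at r=16)")
```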
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:92afb8c3d8929f20b62ac94e493d71f8f07af64aaa477be85988ca22d0e1ded4
+oid sha256:7996fa7271f62490bcb7c0a548cad3b765ec648c70677f878ca28b39b86629fb
 size 5368