nithyarajkumar committed on
Commit
9f0edc8
1 Parent(s): 92a6661

End of training

adapter_config.json CHANGED
@@ -19,8 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj"
+    "self_attn.v_proj",
+    "mlp.up_proj",
+    "self_attn.k_proj",
+    "mlp.down_proj",
+    "self_attn.q_proj",
+    "mlp.gate_proj",
+    "self_attn.o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1fe864d0efb75b1ae3e1c759a9556306aa4cb916db76a0f8db3ee78430cef7e6
-size 4517152
+oid sha256:18d2a49e2aadfe354c6607af336139c36cdbd365a215c87b3436cfcba7906467
+size 25271744
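
The adapter weights grow from about 4.5 MB to about 25 MB, consistent with targeting seven projection matrices instead of two. A hedged sketch of loading the updated adapter with PEFT follows; the base model id and adapter repo path are placeholders, since neither is shown in this commit.

```python
# Usage sketch only: "base-model-name" and "<this-adapter-repo>" are placeholders.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("base-model-name")   # placeholder id
tokenizer = AutoTokenizer.from_pretrained("base-model-name")     # placeholder id
model = PeftModel.from_pretrained(base, "<this-adapter-repo>")   # placeholder adapter path
```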
runs/Feb27_13-48-09_5ec149643f71/events.out.tfevents.1709041849.5ec149643f71.1298.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da030e9f127c2c8839d6fb7cea73e2b370650eed9149faa09b81dc69649e5369
+size 4909
runs/Feb27_13-52-58_5ec149643f71/events.out.tfevents.1709042022.5ec149643f71.1298.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0525effe9a9ce352d6d7abee9bf8d84ee3aab9c72f7de5000cac93b985b0de67
+size 6797
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0f574573e8d8ee836277054301df61fbef17cdec875e5622e9801b94b356d355
+oid sha256:4db67eb70ee9d45e09893146b0cca95ad11e98b5d19dc2716a85079bcf78762b
 size 4920
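
The new runs/ event files and the updated training_args.bin are typical artifacts of a Hugging Face Trainer run with TensorBoard logging enabled; training_args.bin is the serialized TrainingArguments object. The sketch below shows assumed arguments that would produce such artifacts; the specific values are placeholders, not read from the binary file.

```python
# Hedged sketch: placeholder hyperparameters, not the values stored in training_args.bin.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir=".",            # training_args.bin and runs/ land under output_dir
    report_to="tensorboard",   # writes events.out.tfevents.* under output_dir/runs/
    logging_steps=10,          # assumed logging cadence
    num_train_epochs=1,        # placeholder
)
```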