andrewAmani committed
Commit 1e604bc
1 parent: 4cddc6f

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -20,8 +20,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3daae1446d185763cdb01e22e779001493a714907311bf7e5c38e8f3609cca78
+oid sha256:510fa97996ef406847d68b49a807e22adb6e1764ba5b4f8c5de3f6633b8ee9b3
 size 54543184
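adapter_model.safetensors is stored with Git LFS, so the commit only touches the pointer file: the sha256 oid changes while the size stays 54543184 bytes. One way to check that a locally downloaded weight file matches the new pointer is to hash it yourself; the sketch below assumes the file sits in the current directory, which is a placeholder.

# Sketch: verify a downloaded LFS object against the sha256 oid in the pointer.
# The file path is a placeholder for wherever the real checkpoint lives.
import hashlib

EXPECTED_OID = "510fa97996ef406847d68b49a807e22adb6e1764ba5b4f8c5de3f6633b8ee9b3"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in chunks so large checkpoints need not fit in RAM."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

print(sha256_of("adapter_model.safetensors") == EXPECTED_OID)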
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:74cf575ee6a3c6081aee8b8a4596e7e70696f22254d3f979899b14886f830e72
+oid sha256:77e6c8bc626e58754f88a39c6f350b276a7847fe2d171da29abc0b508fd4670e
 size 5112
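training_args.bin is also an LFS pointer; the underlying file is the pickled TrainingArguments object that transformers' Trainer writes next to checkpoints. To see which hyperparameters changed between epochs, it can be unpickled locally; note that unpickling executes code, so only do this for repositories you trust, and the path below is a placeholder.

# Sketch: inspect the TrainingArguments pickled into training_args.bin.
# Unpickling runs arbitrary code, so only load files from trusted sources.
import torch

args = torch.load("training_args.bin", weights_only=False)  # placeholder path
print(type(args).__name__)  # typically "TrainingArguments"
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)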