chansung committed on
Commit a160e4a
1 Parent(s): b19f347

Training in progress, step 100

adapter_config.json CHANGED
@@ -20,8 +20,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ffa0d5e053a7e6db6b59cecef500cb563c0bf9fe7e3eadfaeb726d3959c4c804
+oid sha256:1b35d856b622c37d4d230e4339c2eb77c64b7b79eda2623308a1d7aef4186109
 size 4216407520
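The oid in a Git LFS pointer is the SHA-256 of the stored file, so the updated adapter weights can be verified after download. A minimal sketch, assuming adapter_model.safetensors has been pulled into the working directory:

```python
# Minimal sketch: check a downloaded LFS object against the sha256 oid from the
# pointer above. Streams in 1 MiB chunks so the ~4.2 GB file never loads whole.
import hashlib

EXPECTED_OID = "1b35d856b622c37d4d230e4339c2eb77c64b7b79eda2623308a1d7aef4186109"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert sha256_of("adapter_model.safetensors") == EXPECTED_OID
```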
runs/Nov18_01-59-42_main-lora-llama3-8b-coding-0-0/events.out.tfevents.1731914017.main-lora-llama3-8b-coding-0-0.458.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b6c454be505f30964ec62e8928ac487ee7368491ea8d6063bca61afc03c9237
+size 9900
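The added file is a TensorBoard event log written during training. A minimal sketch for reading it locally with TensorBoard's EventAccumulator (the scalar tag name is a hypothetical example; the actual tags depend on what the Trainer logged):

```python
# Minimal sketch: read the scalars logged to the tfevents file in the runs/ directory.
# The tag "train/loss" is a hypothetical example; list ea.Tags() to see what exists.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Nov18_01-59-42_main-lora-llama3-8b-coding-0-0")
ea.Reload()
print(ea.Tags()["scalars"])               # available scalar tags
for event in ea.Scalars("train/loss"):    # hypothetical tag name
    print(event.step, event.value)
```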
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:aee2395cf3a0be797f651c8289b4a8db2a75ef89a741a506f037281d9e83f458
+oid sha256:dcb7b86846ddf44e332d4f9c6ef9b883541910a2780ad18c94c9ef0a581758bc
 size 5624
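training_args.bin is the pickled TrainingArguments object that transformers.Trainer saves with each checkpoint; only its hash changed here, not its size. A minimal sketch for inspecting it after download; weights_only=False is needed on recent PyTorch versions because the file is a pickled Python object rather than a plain tensor checkpoint:

```python
# Minimal sketch: load and inspect the serialized TrainingArguments.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```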