Liu-Xiang committed on
Commit 5b25c95
1 Parent(s): e696beb

Training in progress, step 276

adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
-    "up_proj",
-    "o_proj",
-    "v_proj",
     "gate_proj",
+    "down_proj",
     "q_proj",
-    "k_proj"
+    "o_proj",
+    "up_proj",
+    "k_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b12c830c1bca8aa81e618f303b95716ec5af1ba4b7464607c7f21985463586fe
+oid sha256:7e46ca10551d982900da413e3a7c5492290a1e980bdd29abc26ceb39ad77617e
 size 45258384
runs/Aug07_12-30-57_genertive-ai-workbench-0/events.out.tfevents.1723033858.genertive-ai-workbench-0.508.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72ae972fdf0d00cb34d32bcfe325e0c7463441db69220d707cb4de368ac21b53
+size 6509
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c432ac93af728ded33ce5368829293946b97b60f48d4914190128fdfdea07308
+oid sha256:5e5be30d6498035738d9d43cc253c82d6afe0c7edc2e5860354169119cb2a31b
 size 5176
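The three binary files above are Git LFS pointers, so the diff only shows the new sha256 and size; the actual bytes live in LFS storage. A minimal sketch for fetching the updated adapter at this exact commit with huggingface_hub; the repo id is a placeholder, since the full repository name is not shown on this page:

# Sketch only: download the LFS-backed adapter as of this commit.
# "Liu-Xiang/your-adapter-repo" is a placeholder repo id, not the real repository name.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="Liu-Xiang/your-adapter-repo",  # placeholder; replace with the actual repo
    filename="adapter_model.safetensors",
    revision="5b25c95",  # commit shown above; use the full 40-char SHA if the short form is rejected
)
print(path)  # local path to the 45,258,384-byte safetensors file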