ucmp137538 committed on
Commit 44584bb
1 Parent(s): 350dbc1

End of training

README.md CHANGED
@@ -17,7 +17,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [meta-llama/Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 1.0668
+- Loss: 0.5922
 
 ## Model description
 
@@ -50,10 +50,10 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:----:|:---------------:|
-| 1.166         | 1.0   | 694  | 1.0270          |
-| 1.0118        | 2.0   | 1388 | 1.0216          |
-| 0.7775        | 3.0   | 2082 | 1.0420          |
-| 0.6204        | 4.0   | 2776 | 1.0668          |
+| 0.6798        | 1.0   | 694  | 0.5959          |
+| 0.538         | 2.0   | 1388 | 0.5740          |
+| 0.4497        | 3.0   | 2082 | 0.5717          |
+| 0.3353        | 4.0   | 2776 | 0.5922          |
 
 
 ### Framework versions
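The README hunk above records the evaluation loss dropping from 1.0668 to 0.5922 for this LoRA fine-tune of Llama-2-7b-chat-hf. As a minimal sketch (not part of the commit), an adapter checkpoint like the `adapter_model.safetensors` updated below is typically loaded with PEFT on top of the base model; the adapter repository id here is a hypothetical placeholder, since it is not stated in this diff.

```python
# Sketch only: load the base chat model, then attach the LoRA adapter weights.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "meta-llama/Llama-2-7b-chat-hf"
adapter_id = "your-username/your-adapter-repo"  # placeholder; not named in this commit

tokenizer = AutoTokenizer.from_pretrained(base_id)
base_model = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype=torch.float16)

# Apply the fine-tuned LoRA weights (adapter_model.safetensors) on top of the base model.
model = PeftModel.from_pretrained(base_model, adapter_id)
model.eval()
```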
adapter_config.json CHANGED
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:31538b96d7754c6b768c5a43f817f09bd574546c87981b1060d7b4d72f5e9bb5
+oid sha256:fd069d5a3613cd5ac7b74504d5cf6b28ad0548639e8200ea19af9acc5b34aeaa
 size 1182877280
runs/Mar25_21-50-33_0e089ef8b5b4/events.out.tfevents.1711403444.0e089ef8b5b4.2571.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb98be2611abe1302f73fde1b8c297e7f7a04ecde62958d28ce8cbc5fa1ebf71
+size 8220
runs/Mar25_21-56-53_0e089ef8b5b4/events.out.tfevents.1711403828.0e089ef8b5b4.6906.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a548d6087d748dac2729d0aad81b8ee6ec4fe792d6d252d15a10a98429b9b31
+size 4781
runs/Mar25_21-56-53_0e089ef8b5b4/events.out.tfevents.1711403937.0e089ef8b5b4.6906.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2fad4dfa02eb0d94692632fad67415038040d3454534b136f2558207efa5b55c
+size 4781
runs/Mar25_21-56-53_0e089ef8b5b4/events.out.tfevents.1711403993.0e089ef8b5b4.6906.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e2855c7a19a7c39fbfe53a0b25627a2c45b6151a865b835f223d87d2eaa7e7f
+size 4781
runs/Mar25_21-56-53_0e089ef8b5b4/events.out.tfevents.1711404343.0e089ef8b5b4.6906.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd9b34349393bad745a92d534d0cb85608e61bb3cc873e9d732853d5a6904b3e
+size 4781
runs/Mar25_21-56-53_0e089ef8b5b4/events.out.tfevents.1711404610.0e089ef8b5b4.6906.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:44d7f92bcc1f12ddea616043ca6c43d715625842717a014e68d02b51811abe38
+size 4781
runs/Mar25_21-56-53_0e089ef8b5b4/events.out.tfevents.1711404638.0e089ef8b5b4.6906.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d496772b31a0b8108d82caa1e7e62e0d7edefc5482e5b669cb35e84db5eddf5
+size 4781
runs/Mar25_21-56-53_0e089ef8b5b4/events.out.tfevents.1711404654.0e089ef8b5b4.6906.6 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62c1a3106d02486be047341a925821dffc1c2362b62a74582ca98e26469aec3f
+size 4781
runs/Mar25_22-35-54_0e089ef8b5b4/events.out.tfevents.1711406161.0e089ef8b5b4.6906.7 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a8f1913a7584a28816af207fa5643c63d57affb96866c1e4d4c35e7ca92aa21
+size 4935
runs/Mar25_22-37-20_0e089ef8b5b4/events.out.tfevents.1711406244.0e089ef8b5b4.6906.8 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fbc4d3008763b8ab75b5b50c9401cc7724b52cebcde2391c93075ad432713ac5
+size 23631
runs/Mar25_23-54-27_0e089ef8b5b4/events.out.tfevents.1711410876.0e089ef8b5b4.6906.9 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c7136842b263144117fb33b4f7ac309d2813c44dc6490f9b717fcd53ab85021
+size 23631
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 1024,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
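This hunk removes the fixed right-side truncation (max_length 1024) that was previously saved into tokenizer.json, leaving `"truncation": null`. A brief sketch, assuming the standard transformers tokenizer API: with no truncation baked into the saved tokenizer, callers request it per call instead; the max_length below simply mirrors the removed setting and is only an example.

```python
# Sketch only: apply truncation at call time rather than via tokenizer.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf")
batch = tokenizer(
    ["Example prompt to encode"],
    truncation=True,      # requested per call, since the saved config no longer sets it
    max_length=1024,      # mirrors the removed setting; illustrative only
    return_tensors="pt",
)
```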
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7c47e0cb83060c1e51084db6f675bedf6c76e89403672c930731d185f41e6d76
+oid sha256:7f9ed7b2a10a655cbb2c8359b2a8023e20db4efb000524d2a8cce325dc5fa8a8
 size 4728