lc111 committed
Commit 802f48e
1 Parent(s): efd9bfc

End of training

Files changed (1)
  1. README.md +7 -5
README.md CHANGED
@@ -1,10 +1,11 @@
  ---
  license: apache-2.0
- base_model: PY007/TinyLlama-1.1B-Chat-v0.3
+ library_name: peft
  tags:
  - trl
  - sft
  - generated_from_trainer
+ base_model: PY007/TinyLlama-1.1B-Chat-v0.3
  model-index:
  - name: tinyllama-colorist-lora
    results: []
@@ -42,7 +43,7 @@ The following hyperparameters were used during training:
  - total_train_batch_size: 32
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
- - training_steps: 200
+ - training_steps: 500
  - mixed_precision_training: Native AMP

  ### Training results
@@ -51,7 +52,8 @@ The following hyperparameters were used during training:

  ### Framework versions

- - Transformers 4.35.2
- - Pytorch 2.1.0+cu121
+ - PEFT 0.7.1
+ - Transformers 4.36.2
+ - Pytorch 2.1.0
  - Datasets 2.16.1
- - Tokenizers 0.15.0
+ - Tokenizers 0.15.0
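
The hyperparameters hunk amounts to a trainer configuration. For orientation, here is a hedged sketch of equivalent `transformers` `TrainingArguments`; anything not listed in the diff (output directory, learning rate, and how the total batch size of 32 splits across per-device batch and gradient accumulation) is an assumption, not taken from this commit.

```python
# Sketch of TrainingArguments mirroring the hyperparameters shown in the card diff
# (cosine schedule, 500 training steps, Adam betas/epsilon, Native AMP).
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="tinyllama-colorist-lora",  # assumed
    per_device_train_batch_size=8,         # assumed; 8 * 4 accumulation = total batch 32
    gradient_accumulation_steps=4,         # assumed
    learning_rate=2e-4,                    # assumed; not shown in this hunk
    lr_scheduler_type="cosine",            # from the card
    max_steps=500,                         # training_steps raised from 200 to 500
    adam_beta1=0.9,                        # Adam betas=(0.9, 0.999), epsilon=1e-08 as listed
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    fp16=True,                             # mixed_precision_training: Native AMP
)
```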
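The metadata changes mark this repository as a PEFT (LoRA) adapter whose base model is PY007/TinyLlama-1.1B-Chat-v0.3. Below is a minimal sketch of loading such an adapter with the framework versions listed above; the Hub repo id and the example prompt are assumptions, not taken from this commit.

```python
# Minimal sketch: load the LoRA adapter on top of its base model via PEFT.
from transformers import AutoTokenizer
from peft import AutoPeftModelForCausalLM

adapter_id = "lc111/tinyllama-colorist-lora"  # assumed Hub repo id

# AutoPeftModelForCausalLM reads the adapter config, downloads the base model
# (PY007/TinyLlama-1.1B-Chat-v0.3) and attaches the LoRA weights.
model = AutoPeftModelForCausalLM.from_pretrained(adapter_id)
tokenizer = AutoTokenizer.from_pretrained("PY007/TinyLlama-1.1B-Chat-v0.3")

prompt = "Give me the color code for a deep teal."  # example prompt, assumed
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```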