deuswoof committed
Commit 1918038 (1 parent: a0a7378)

Training in progress, step 10

24_10_23_config_test_5.csv CHANGED
@@ -7,6 +7,6 @@ run_number,comment,peformed_already,num_train_epochs,max_tokens,temperature,stop
 6,temperature set 0.95,True,2,100,0.95,False
 7,max_tokens set 10,True,2,10,0.8,False
 8,max_tokens set 30,True,2,30,0.8,False
-9,max_tokens set 300,False,2,300,0.8,False
+9,max_tokens set 300,True,2,300,0.8,False
 10,max_tokens set 500,False,2,500,0.8,False
 11,stop_token set True,False,2,100,0.8,True
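
This config CSV drives a small generation-settings sweep (epochs, max_tokens, temperature, a stop-token flag), with a performed-already marker that this commit flips to True for run 9. Below is a minimal, hypothetical sketch of reading such a file to pick the runs still pending; the column names are taken from the hunk header above (the final stop-token column name is truncated there, so it is not referenced).

```python
import csv

def pending_runs(path="24_10_23_config_test_5.csv"):
    """Yield settings for rows not yet marked as performed (hypothetical helper)."""
    with open(path, newline="") as f:
        for row in csv.DictReader(f):
            # Column names as shown in the diff header, including the
            # 'peformed_already' spelling used in the file itself.
            if row["peformed_already"] == "False":
                yield {
                    "run_number": int(row["run_number"]),
                    "num_train_epochs": int(row["num_train_epochs"]),
                    "max_tokens": int(row["max_tokens"]),
                    "temperature": float(row["temperature"]),
                }
```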
24_10_23_results_real.csv CHANGED
@@ -9,4 +9,5 @@ run_number,items_per_minute,changed_settings,total_time_taken,rouge_scores_unnes
 8,1522.6253025648766,temperature set 0.95,17.33847451210022,0,0.1358318375162381,0.2367233918316699,0.1608590624915817,0.1684281758375314,0.2753995807236896,0.1957692397202891,0.2017753107078927,0.3158922152664998,0.231842660328347,0.0348253941650094,0.053243827263793,0.0397742520737331,0.0504060301453853,0.0741161939034151,0.0569579405303325,0.0665651758039863,0.0972012308295898,0.0754698873782298
 9,8137.253778897434,max_tokens set 10,3.244337797164917,0,0.1297367298173749,0.0248065965409596,0.0394931695780917,0.1776028332479944,0.0319881335779764,0.0503325526395891,0.2305753968253968,0.0398045284176508,0.0623919220801066,0.021505376344086,0.0012679576629888,0.0022182435535744,0.0615292712066905,0.003434056688616,0.0059725196216856,0.1129256272401433,0.0066200549176752,0.0109573554493896
 10,4535.835024326586,max_tokens set 30,5.820317506790161,0,0.2422416346899768,0.0977982875121211,0.1325246370842857,0.2880002239989636,0.1154357218367962,0.1561017908481051,0.339886078686101,0.1336154477253922,0.1801894525067171,0.072104162735737,0.0225936047273112,0.033723979023946,0.10640965444612,0.0328642617088289,0.0488049972899845,0.1415701399984359,0.0435389639524036,0.0644875164681592
-11,0.0,0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
+11,414.6348899218345,max_tokens set 300,63.6704740524292,0,0.1036658616791158,0.4509069403064212,0.1604171913717259,0.1231034900935985,0.4927653826230481,0.1876565027825787,0.1450621467503176,0.5346234029929966,0.2163978589006886,0.0448793224180538,0.1701044771154594,0.0683517341604418,0.0570134552924645,0.2053617015027369,0.0863401271991256,0.0714855259069783,0.2472927214590566,0.1070059828658844
+12,0.0,0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
README.md CHANGED
@@ -544,6 +544,18 @@ The following `bitsandbytes` quantization config was used during training:
 - bnb_4bit_use_double_quant: True
 - bnb_4bit_compute_dtype: bfloat16
 
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: False
+- load_in_4bit: True
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: nf4
+- bnb_4bit_use_double_quant: True
+- bnb_4bit_compute_dtype: bfloat16
+
 The following `bitsandbytes` quantization config was used during training:
 - quant_method: bitsandbytes
 - load_in_8bit: False
@@ -602,5 +614,6 @@ The following `bitsandbytes` quantization config was used during training:
 - PEFT 0.5.0
 - PEFT 0.5.0
 - PEFT 0.5.0
+- PEFT 0.5.0
 
 - PEFT 0.5.0
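
The quantization block added to the README matches the 4-bit NF4 setup that `transformers` exposes through `BitsAndBytesConfig`. A minimal sketch of constructing an equivalent config follows; the base model name is a placeholder, since this commit does not name it.

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Mirrors the config block added to the README: 4-bit NF4 quantization,
# double quantization enabled, bfloat16 compute dtype.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
    llm_int8_threshold=6.0,
    llm_int8_has_fp16_weight=False,
)

# "base-model-name" is a placeholder; substitute the actual base checkpoint.
model = AutoModelForCausalLM.from_pretrained(
    "base-model-name",
    quantization_config=bnb_config,
    device_map="auto",
)
```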
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0dcbc8beb7e07843aa8571b6bc1d5a2e7afb5c9d56af37f6d3e80f18d0d849c2
+oid sha256:006d5339621965f94bea679d5067ed094422404fe40e35a1b5ee209d639a4af3
 size 100733709
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:60ce81dfc4c6e390510baa0a253c9daf0ad5c7354c9152e7d0b748c7fb6eb8b1
+oid sha256:007ecd89f0aa8794ead1f5f9d1cf00b2f12b52f91eeea115b04821a9bf1d5c79
 size 100690288
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57067655398e3a7e24c43c0422197d5137b7fca6e0886ae4d6e05b55472bb0a8
+oid sha256:dcc3eab1d36907713527d9a6f8f88999750f520284dd62ba3536fb9eccb57903
 size 4283