Commit 45b87f5 (1 parent: b5be070), committed by deuswoof

Training in progress, step 10

26_10_23_config_test.csv CHANGED
@@ -5,4 +5,5 @@ run_number,comment,peformed_already,num_train_epochs,max_tokens,temperature,stop
 4,classification_of_valuems set True,True,2,100,0.8,False,True,False,False
 5,classification_of_valuems set True,True,2,100,0.8,False,True,False,False
 6,classification_of_valuems set True,True,2,100,0.8,False,True,False,False
-7,classification_of_valuems set True,False,2,100,0.8,False,True,False,False
+7,classification_of_valuems set True,True,2,100,0.8,False,True,False,False
+8,classification_of_valuems set True,False,2,100,0.8,False,True,False,False
26_10_23_results_real_3.csv CHANGED
@@ -5,4 +5,4 @@ run_number,items_per_minute,changed_settings,total_time_taken,rouge_scores_unnes
 4,1525.602524101584,classification_of_valuems set True,17.304638385772705,0,0.3449559795118336,0.3423341666457499,0.3223067218438548,0.3962323417858379,0.3934280156800367,0.3705820791169619,0.4480554100301472,0.4472379094634592,0.4191781582462838,0.1924640322240557,0.1864412220397102,0.1791588870738077,0.2322731554369229,0.2249807377601852,0.2130668974234288,0.275524472174082,0.267281230315844,0.2520826707151871,0.25,0.25,0.25
 5,1526.1514943613795,lemmatization set True stemming set True classification_of_valuems set True,17.29841375350952,0,0.2740090800292036,0.3255265414032295,0.2775650458916542,0.3242606066397121,0.382074138505741,0.3267769081728278,0.3772870132781871,0.4458722947774239,0.3791647882107381,0.1589738558026723,0.1948121530040918,0.163589425650448,0.197346401556911,0.2400173323974756,0.2012985648757239,0.2417203208425002,0.291225773302693,0.2448890209857868,0.5,0.5,0.5
 6,1530.684509829563,lemmatization set True classification_of_valuems set True,17.247185707092285,0,0.2089967577635823,0.2539863482828974,0.2180593819184433,0.2566736342083835,0.3067268601430931,0.2634860133658033,0.3083760408920908,0.3641939637052689,0.3143688188949664,0.1022732513904408,0.1231935696888519,0.1061505173202719,0.1366654941744046,0.1619876288426343,0.1387847257382091,0.1783473709510714,0.2101453928900783,0.1797982054917218,0.4999999999999998,0.4999999999999998,0.4999999999999998
-9,1537.4830854419854,lemmatization set True stemming set True classification_of_valuems set True,17.170920610427856,0,0.3137446610565907,0.346938031739325,0.3116890258549815,0.36716366175121,0.4078239534610949,0.3658593356405938,0.4216956378639542,0.4672338475399563,0.4174884317080075,0.1868318428239036,0.2090565062942769,0.1872856926409339,0.2333035163819085,0.2570762720696271,0.2307891585347144,0.2805117716175197,0.3026827885812185,0.2738993050878683,0.25,0.25,0.25
+10,1537.4830854419854,lemmatization set True stemming set True classification_of_valuems set True,17.170920610427856,0,0.3137446610565907,0.346938031739325,0.3116890258549815,0.36716366175121,0.4078239534610949,0.3658593356405938,0.4216956378639542,0.4672338475399563,0.4174884317080075,0.1868318428239036,0.2090565062942769,0.1872856926409339,0.2333035163819085,0.2570762720696271,0.2307891585347144,0.2805117716175197,0.3026827885812185,0.2738993050878683,0.25,0.25,0.25
26_10_23_results_test.csv CHANGED
@@ -7,4 +7,5 @@ run_number,items_per_minute,changed_settings,total_time_taken,rouge_scores_unnes
 0,1524.8745306877388,classification_of_valuems set True,17.312899827957153,0,0.1523559158621903,0.2402063075738076,0.1711786154828841,0.1879905787977005,0.2725423044286807,0.2052492305829349,0.2249142701073362,0.3101320775218208,0.2376186625911425,0.0456612013757211,0.061517705140545,0.0486773374221998,0.064538975391185,0.08457436273416,0.0659237042923236,0.0864961253662038,0.1081372741329041,0.0849530450555745,0.5,0.5,0.5
 0,1530.8284721565442,classification_of_valuems set True,17.245563745498657,0,0.1835189445382876,0.2841207438961418,0.2112056166511617,0.2178189965285001,0.3192970863671794,0.2472962612883028,0.2545664417741938,0.3541988923842352,0.2811301451542974,0.0548828059844037,0.0718096062377719,0.060413755710315,0.0712374843623777,0.0926737949778724,0.0779697186650809,0.0904927939124252,0.1151478474543225,0.0988436189356827,0.4999999999999999,0.4999999999999999,0.4999999999999999
 0,1519.4253778075708,classification_of_valuems set True,17.37498950958252,0,0.2448062646628152,0.3469024393843046,0.2689767055556361,0.2839292845493338,0.3847744667974158,0.3046270148335429,0.3231436457836793,0.4233519343131728,0.3403500268435545,0.0988042176523105,0.1314341849392717,0.1053886873309759,0.1221408031923088,0.161322194104573,0.1288816368026844,0.1480071736130433,0.1914677474964019,0.1538736199513103,0.5000000000000002,0.5000000000000002,0.5000000000000002
+0,1534.4169599085549,classification_of_valuems set True,17.2052321434021,0,0.2302730213229578,0.3219961939226703,0.2531060186731637,0.2717769025648489,0.3726734239350072,0.2946711204360011,0.3123765700487656,0.4172674335113968,0.3315270276187541,0.0942940049125073,0.1271350898134716,0.101126848822042,0.122824934183027,0.157562640377479,0.129838907479759,0.151774580889469,0.1937226258515307,0.158954825883685,0.5,0.5,0.5
 0,0.0,0,0.0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
README.md CHANGED
@@ -964,6 +964,18 @@ The following `bitsandbytes` quantization config was used during training:
 - bnb_4bit_use_double_quant: True
 - bnb_4bit_compute_dtype: bfloat16
 
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: False
+- load_in_4bit: True
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: nf4
+- bnb_4bit_use_double_quant: True
+- bnb_4bit_compute_dtype: bfloat16
+
 The following `bitsandbytes` quantization config was used during training:
 - quant_method: bitsandbytes
 - load_in_8bit: False
@@ -1057,5 +1069,6 @@ The following `bitsandbytes` quantization config was used during training:
 - PEFT 0.5.0
 - PEFT 0.5.0
 - PEFT 0.5.0
+- PEFT 0.5.0
 
 - PEFT 0.5.0
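For reference, the 4-bit settings recorded in the new README block map directly onto `transformers.BitsAndBytesConfig`. A minimal sketch, assuming the usual transformers + bitsandbytes setup (the training script itself is not part of this commit; the base-model id is taken from the adapter_config.json change below, and `device_map` is an assumption):

```python
# Illustrative only: a BitsAndBytesConfig mirroring the values listed in the README.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # load_in_4bit: True, load_in_8bit: False
    bnb_4bit_quant_type="nf4",              # bnb_4bit_quant_type: nf4
    bnb_4bit_use_double_quant=True,         # bnb_4bit_use_double_quant: True
    bnb_4bit_compute_dtype=torch.bfloat16,  # bnb_4bit_compute_dtype: bfloat16
)

# Base model per the updated adapter_config.json; device_map="auto" is an assumption.
model = AutoModelForCausalLM.from_pretrained(
    "tiiuae/falcon-7b",
    quantization_config=bnb_config,
    device_map="auto",
)
```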
adapter_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "tiiuae/falcon-rw-1b",
+  "base_model_name_or_path": "tiiuae/falcon-7b",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c7c413039edc99f37322a0fb24b61baa3ac4182a50ce358ae8df84cc0b1df58e
-size 100733709
+oid sha256:0d5521d0dc5a0cbd51ef766a435be5a79dc06ddc478a35eae0550dc713af1c93
+size 261189453
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a0db11243e5213810db7b77dc7935e9a727eea52a14f6a40e0ce27679d1e654c
+oid sha256:ae5f7148db11fd54683f3c602f52e502a9d8996da1d3dc603e68626fb1a0651b
 size 100690288
special_tokens_map.json CHANGED
@@ -1,6 +1,17 @@
 {
-  "bos_token": "<|endoftext|>",
+  "additional_special_tokens": [
+    ">>TITLE<<",
+    ">>ABSTRACT<<",
+    ">>INTRODUCTION<<",
+    ">>SUMMARY<<",
+    ">>COMMENT<<",
+    ">>ANSWER<<",
+    ">>QUESTION<<",
+    ">>DOMAIN<<",
+    ">>PREFIX<<",
+    ">>SUFFIX<<",
+    ">>MIDDLE<<"
+  ],
   "eos_token": "<|endoftext|>",
-  "pad_token": "<|endoftext|>",
-  "unk_token": "<|endoftext|>"
+  "pad_token": "<|endoftext|>"
 }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,7 +1,95 @@
 {
   "add_prefix_space": false,
   "added_tokens_decoder": {
-    "50256": {
+    "0": {
+      "content": ">>TITLE<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": ">>ABSTRACT<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": ">>INTRODUCTION<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": ">>SUMMARY<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": ">>COMMENT<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "5": {
+      "content": ">>ANSWER<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": ">>QUESTION<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "7": {
+      "content": ">>DOMAIN<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "8": {
+      "content": ">>PREFIX<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "9": {
+      "content": ">>SUFFIX<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "10": {
+      "content": ">>MIDDLE<<",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "11": {
       "content": "<|endoftext|>",
       "lstrip": false,
       "normalized": false,
@@ -10,11 +98,26 @@
       "special": true
     }
   },
-  "bos_token": "<|endoftext|>",
+  "additional_special_tokens": [
+    ">>TITLE<<",
+    ">>ABSTRACT<<",
+    ">>INTRODUCTION<<",
+    ">>SUMMARY<<",
+    ">>COMMENT<<",
+    ">>ANSWER<<",
+    ">>QUESTION<<",
+    ">>DOMAIN<<",
+    ">>PREFIX<<",
+    ">>SUFFIX<<",
+    ">>MIDDLE<<"
+  ],
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
-  "model_max_length": 1024,
+  "model_input_names": [
+    "input_ids",
+    "attention_mask"
+  ],
+  "model_max_length": 2048,
   "pad_token": "<|endoftext|>",
-  "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<|endoftext|>"
+  "tokenizer_class": "PreTrainedTokenizerFast"
 }
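Taken together, the special_tokens_map.json and tokenizer_config.json updates replace the GPT-2 tokenizer description (`GPT2Tokenizer`, `model_max_length` 1024, a single `<|endoftext|>` special token) with the Falcon-style fast tokenizer: eleven `>>...<<` control tokens, `model_max_length` 2048, and `tokenizer_class` `PreTrainedTokenizerFast`. A minimal sketch of inspecting the committed tokenizer files; the path is a placeholder:

```python
# Sketch only: load the tokenizer saved in this commit and check the fields changed here.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/checkpoint")  # placeholder path

print(type(tok).__name__)             # PreTrainedTokenizerFast, per tokenizer_config.json
print(tok.model_max_length)           # 2048 (previously 1024)
print(tok.additional_special_tokens)  # ['>>TITLE<<', '>>ABSTRACT<<', ..., '>>MIDDLE<<']
print(tok.eos_token, tok.pad_token)   # both '<|endoftext|>'; bos/unk are no longer set
```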
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:de81a9b53f31479f45e0d3951a33c2bff7ee7362fe634aac9c0438b9164db6ce
+oid sha256:d44904d4f28af8a9f09198e70fae4bb2617a43ca6ca5c1dabf4b9f141cad2df8
 size 4283