joelniklaus committed on
Commit 9b8032c
1 Parent(s): 8259868

Training in progress, step 200000

last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8f75a3ad0bbbc86394f319daec4d3ced52c33745e4b975256e4f5448b0631f4e
+ oid sha256:3bbd24173aeea56d78e78e151644d5a6c20eff84cb5c57a057fb98e9cd7b078b
  size 2693742553
last-checkpoint/pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0f669cb82275e0b9b619df35806db65345d133b7785b7d3fe367dd767f9c5fce
+ oid sha256:9081077586c74f539540e0c24e338073ebf222d00fd0936904159c6a88f7f7a4
  size 1346893675
last-checkpoint/rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/rng_state_2.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/rng_state_3.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/rng_state_4.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/rng_state_5.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/rng_state_6.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/rng_state_7.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5de3357f42c5b073bd69fff2091011011a109b6f0c27bc28d8bb9c4b3ffaf8b3
+ oid sha256:70fcc75723b727ad2ba0713e3d3d331437b52052527bd6e3d888535bf4ffc621
  size 13611
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:04ad030a150fa0d8eb5e5920300951e9645dc85319159f9dd4f177aff4c5b722
+ oid sha256:be1ccf49f4804619cd7d22b74b595a694a368e629a10492b4089d6536d07bdf2
  size 623
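
The blocks above are Git LFS pointer updates: each checkpoint file is overwritten in place at step 200000, so only the sha256 oid changes while the recorded byte size stays the same. A minimal sketch, assuming the large files have already been pulled locally (for example with git lfs pull), for checking that a local file matches the oid recorded in its pointer; the local path is an assumption, not part of this commit:

import hashlib

def lfs_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream a large file and return its sha256 hex digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected oid for last-checkpoint/pytorch_model.bin after this commit (from the diff below).
expected = "9081077586c74f539540e0c24e338073ebf222d00fd0936904159c6a88f7f7a4"
actual = lfs_sha256("last-checkpoint/pytorch_model.bin")  # hypothetical local checkout
print("match" if actual == expected else f"mismatch: {actual}")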
last-checkpoint/trainer_state.json CHANGED
@@ -1,8 +1,8 @@
  {
  "best_metric": null,
  "best_model_checkpoint": null,
- "epoch": 0.15,
- "global_step": 150000,
+ "epoch": 0.2,
+ "global_step": 200000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
@@ -930,11 +930,319 @@
  "eval_samples_per_second": 255.796,
  "eval_steps_per_second": 4.042,
  "step": 150000
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 9.723691552302562e-05,
+ "loss": 0.9099,
+ "step": 151000
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 9.718245238567939e-05,
+ "loss": 0.8585,
+ "step": 152000
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 9.712747326859315e-05,
+ "loss": 0.8351,
+ "step": 153000
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 9.707197877300974e-05,
+ "loss": 0.7363,
+ "step": 154000
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 9.701596950580806e-05,
+ "loss": 0.701,
+ "step": 155000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.695944607949649e-05,
+ "loss": 0.8942,
+ "step": 156000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.690240911220618e-05,
+ "loss": 0.928,
+ "step": 157000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.684485922768422e-05,
+ "loss": 0.8611,
+ "step": 158000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.6786797055287e-05,
+ "loss": 0.7645,
+ "step": 159000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.672822322997305e-05,
+ "loss": 0.8364,
+ "step": 160000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.66691383922964e-05,
+ "loss": 0.9962,
+ "step": 161000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.660954318839933e-05,
+ "loss": 1.0221,
+ "step": 162000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.654943827000548e-05,
+ "loss": 0.9659,
+ "step": 163000
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 9.648882429441257e-05,
+ "loss": 0.8713,
+ "step": 164000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.642770192448536e-05,
+ "loss": 0.821,
+ "step": 165000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.636607182864827e-05,
+ "loss": 0.9402,
+ "step": 166000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.630393468087818e-05,
+ "loss": 0.9458,
+ "step": 167000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.624129116069694e-05,
+ "loss": 0.8473,
+ "step": 168000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.617814195316411e-05,
+ "loss": 0.6521,
+ "step": 169000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.611448774886924e-05,
+ "loss": 0.7333,
+ "step": 170000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.605032924392457e-05,
+ "loss": 0.8378,
+ "step": 171000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.598566713995718e-05,
+ "loss": 0.9204,
+ "step": 172000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.59205021441015e-05,
+ "loss": 0.8255,
+ "step": 173000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.58548349689915e-05,
+ "loss": 0.7353,
+ "step": 174000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 9.578866633275288e-05,
+ "loss": 0.7614,
+ "step": 175000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.572199695899522e-05,
+ "loss": 0.9466,
+ "step": 176000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.565482757680415e-05,
+ "loss": 1.0166,
+ "step": 177000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.558715892073323e-05,
+ "loss": 0.8979,
+ "step": 178000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.551899173079607e-05,
+ "loss": 0.8246,
+ "step": 179000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.545032675245813e-05,
+ "loss": 0.9446,
+ "step": 180000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.538116473662861e-05,
+ "loss": 0.9918,
+ "step": 181000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.531150643965223e-05,
+ "loss": 0.9424,
+ "step": 182000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.524135262330098e-05,
+ "loss": 0.8325,
+ "step": 183000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.517070405476575e-05,
+ "loss": 0.737,
+ "step": 184000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 9.509956150664796e-05,
+ "loss": 0.7896,
+ "step": 185000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.502792575695112e-05,
+ "loss": 0.8106,
+ "step": 186000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.49557975890723e-05,
+ "loss": 0.8712,
+ "step": 187000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.488317779179361e-05,
+ "loss": 0.732,
+ "step": 188000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.481006715927351e-05,
+ "loss": 0.708,
+ "step": 189000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.473646649103818e-05,
+ "loss": 0.8834,
+ "step": 190000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.46623765919727e-05,
+ "loss": 0.8634,
+ "step": 191000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.458779827231237e-05,
+ "loss": 0.8771,
+ "step": 192000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.451273234763371e-05,
+ "loss": 0.8505,
+ "step": 193000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 9.443717963884569e-05,
+ "loss": 0.8172,
+ "step": 194000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 9.43611409721806e-05,
+ "loss": 0.974,
+ "step": 195000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 9.428461717918511e-05,
+ "loss": 0.9992,
+ "step": 196000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 9.420760909671118e-05,
+ "loss": 0.9623,
+ "step": 197000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 9.413011756690685e-05,
+ "loss": 0.7999,
+ "step": 198000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 9.405214343720707e-05,
+ "loss": 0.7812,
+ "step": 199000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 9.397368756032445e-05,
+ "loss": 0.8985,
+ "step": 200000
+ },
+ {
+ "epoch": 0.2,
+ "eval_loss": 0.5022637844085693,
+ "eval_runtime": 30.1991,
+ "eval_samples_per_second": 165.568,
+ "eval_steps_per_second": 2.616,
+ "step": 200000
  }
  ],
  "max_steps": 1000000,
  "num_train_epochs": 9223372036854775807,
- "total_flos": 8.9474847473664e+18,
+ "total_flos": 1.19299796631552e+19,
  "trial_name": null,
  "trial_params": null
  }
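
The trainer_state.json change appends one log entry per 1,000 optimizer steps for steps 151,000 through 200,000, plus the evaluation at step 200,000 (eval_loss 0.5023), and advances epoch and global_step accordingly. A small sketch, assuming the file is checked out locally and follows the usual Trainer layout with entries under log_history, for pulling the loss curve out of it:

import json

# Hypothetical local path to the file changed above.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_loss".
train_log = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_log = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(f"global_step={state['global_step']}, epoch={state['epoch']}")
print("last train losses:", train_log[-3:])
print("eval points:", eval_log)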
last-checkpoint/training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1fa427adbf1e14fe0eefcb4a58e040b236477427ae7fd6324a110fa55e0702bc
+ oid sha256:c0b02673b13f0fc59a49044f6f1aa1cfe1a6854d2087f76c0de0776564c78579
  size 3439
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0f669cb82275e0b9b619df35806db65345d133b7785b7d3fe367dd767f9c5fce
+ oid sha256:9081077586c74f539540e0c24e338073ebf222d00fd0936904159c6a88f7f7a4
  size 1346893675
runs/Jan16_18-30-00_t1v-n-eeadb94b-w-0/events.out.tfevents.1673893828.t1v-n-eeadb94b-w-0.17635.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a04e8e14cc6239ce59e60d577a447f139386cfb8dae87eb0dee12ba92ba63ec0
- size 28560
+ oid sha256:efce05a66f6ca6330055c2751b1a073fbe1922409df6942ed15ce1d1b458a518
+ size 33400
runs/Jan25_00-38-28_t1v-n-eeadb94b-w-0/1674607140.3423548/events.out.tfevents.1674607140.t1v-n-eeadb94b-w-0.3735135.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0379aa0b99e105cbae5fb23257b7debea8c0e696c2d73b88c126071e89b7af35
+ size 5446
runs/Jan25_00-38-28_t1v-n-eeadb94b-w-0/events.out.tfevents.1674607140.t1v-n-eeadb94b-w-0.3735135.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b6e3bd1ac857641f5e31dac45af4e919f8f72f8c0a7cfdb365c1e84a79d2a4d
+ size 12056
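
The two ADDED files are TensorBoard event logs for the new Jan25 run. A sketch, assuming the runs/ directory is available locally and tensorboard is installed, for listing and reading the logged scalars; the tag names are whatever the Trainer wrote, so inspect them before reading a specific one:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Hypothetical local checkout of the run directory added in this commit.
run_dir = "runs/Jan25_00-38-28_t1v-n-eeadb94b-w-0"
ea = EventAccumulator(run_dir)
ea.Reload()

scalar_tags = ea.Tags()["scalars"]
print(scalar_tags)
for event in ea.Scalars(scalar_tags[0]):
    print(event.step, event.value)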
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1fa427adbf1e14fe0eefcb4a58e040b236477427ae7fd6324a110fa55e0702bc
+ oid sha256:c0b02673b13f0fc59a49044f6f1aa1cfe1a6854d2087f76c0de0776564c78579
  size 3439
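
To fetch files from this exact revision programmatically, huggingface_hub can download by commit hash. A minimal sketch; the repo_id is a placeholder, since the commit page does not name the repository:

from huggingface_hub import hf_hub_download

# repo_id is hypothetical -- substitute the actual model repository.
path = hf_hub_download(
    repo_id="joelniklaus/<model-name>",
    filename="pytorch_model.bin",
    revision="9b8032c",  # the commit shown above
)
print(path)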