Training in progress, step 3226
- model-00001-of-00002.safetensors +1 -1
- model-00002-of-00002.safetensors +1 -1
- trainer_log.jsonl +219 -0
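
The updated shards and the training log are ordinary files in the model repository, so once this commit is on the Hub they can be fetched directly. A minimal sketch, assuming the huggingface_hub client; the repository id below is a placeholder, since the repo name is not shown on this page:

# Sketch: download the files touched by this commit.
# REPO_ID is hypothetical; substitute the actual repository id.
from huggingface_hub import hf_hub_download

REPO_ID = "your-org/your-model"

for name in [
    "model-00001-of-00002.safetensors",
    "model-00002-of-00002.safetensors",
    "trainer_log.jsonl",
]:
    path = hf_hub_download(repo_id=REPO_ID, filename=name)
    print(name, "->", path)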
model-00001-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9bb04e058d8e98afbbe75d299787cae3f7b4066e0da083053834b09309c56b5c
 size 4965799096
model-00002-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:39540f2b5b03a27228b31ca49589cc0456ad40a4612127b988c9e278330231ab
 size 1459729952
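
Both weight shards are tracked with Git LFS, so the diffs above only change the small pointer files; each pointer records the LFS spec version, the sha256 of the actual payload, and its size in bytes. A minimal sketch, using only the standard library, of checking a locally downloaded copy of the first shard against the new pointer values (the local path is an assumption):

# Sketch: verify a downloaded shard against its LFS pointer (hash and size above).
import hashlib
import os

PATH = "model-00001-of-00002.safetensors"  # assumed local copy of the shard
EXPECTED_SHA256 = "9bb04e058d8e98afbbe75d299787cae3f7b4066e0da083053834b09309c56b5c"
EXPECTED_SIZE = 4965799096  # bytes, from the pointer

sha = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"
assert sha.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pointer and payload agree")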
trainer_log.jsonl
CHANGED
@@ -3006,3 +3006,222 @@
{"current_steps": 3006, "total_steps": 3226, "loss": 0.677, "learning_rate": 4.667140791411728e-07, "epoch": 0.9317679879111938, "percentage": 93.18, "elapsed_time": "15:20:34", "remaining_time": "1:07:22"}
{"current_steps": 3007, "total_steps": 3226, "loss": 0.7081, "learning_rate": 4.624972584479581e-07, "epoch": 0.9320779573017165, "percentage": 93.21, "elapsed_time": "15:20:52", "remaining_time": "1:07:04"}
{"current_steps": 3008, "total_steps": 3226, "loss": 0.7151, "learning_rate": 4.582993512121281e-07, "epoch": 0.9323879266922391, "percentage": 93.24, "elapsed_time": "15:21:11", "remaining_time": "1:06:45"}
{"current_steps": 3009, "total_steps": 3226, "loss": 0.6854, "learning_rate": 4.541203614975009e-07, "epoch": 0.9326978960827619, "percentage": 93.27, "elapsed_time": "15:21:29", "remaining_time": "1:06:27"}
{"current_steps": 3010, "total_steps": 3226, "loss": 0.6997, "learning_rate": 4.499602933495961e-07, "epoch": 0.9330078654732845, "percentage": 93.3, "elapsed_time": "15:21:47", "remaining_time": "1:06:08"}
{"current_steps": 3011, "total_steps": 3226, "loss": 0.7032, "learning_rate": 4.458191507955945e-07, "epoch": 0.9333178348638071, "percentage": 93.34, "elapsed_time": "15:22:06", "remaining_time": "1:05:50"}
{"current_steps": 3012, "total_steps": 3226, "loss": 0.6975, "learning_rate": 4.4169693784437363e-07, "epoch": 0.9336278042543299, "percentage": 93.37, "elapsed_time": "15:22:24", "remaining_time": "1:05:32"}
{"current_steps": 3013, "total_steps": 3226, "loss": 0.7153, "learning_rate": 4.3759365848647704e-07, "epoch": 0.9339377736448525, "percentage": 93.4, "elapsed_time": "15:22:43", "remaining_time": "1:05:13"}
{"current_steps": 3014, "total_steps": 3226, "loss": 0.714, "learning_rate": 4.3350931669412066e-07, "epoch": 0.9342477430353753, "percentage": 93.43, "elapsed_time": "15:23:01", "remaining_time": "1:04:55"}
{"current_steps": 3015, "total_steps": 3226, "loss": 0.7023, "learning_rate": 4.29443916421195e-07, "epoch": 0.9345577124258979, "percentage": 93.46, "elapsed_time": "15:23:19", "remaining_time": "1:04:37"}
{"current_steps": 3016, "total_steps": 3226, "loss": 0.6771, "learning_rate": 4.2539746160323636e-07, "epoch": 0.9348676818164207, "percentage": 93.49, "elapsed_time": "15:23:38", "remaining_time": "1:04:18"}
{"current_steps": 3017, "total_steps": 3226, "loss": 0.7176, "learning_rate": 4.213699561574602e-07, "epoch": 0.9351776512069433, "percentage": 93.52, "elapsed_time": "15:23:56", "remaining_time": "1:04:00"}
{"current_steps": 3018, "total_steps": 3226, "loss": 0.7366, "learning_rate": 4.1736140398273004e-07, "epoch": 0.935487620597466, "percentage": 93.55, "elapsed_time": "15:24:14", "remaining_time": "1:03:41"}
{"current_steps": 3019, "total_steps": 3226, "loss": 0.7028, "learning_rate": 4.133718089595595e-07, "epoch": 0.9357975899879887, "percentage": 93.58, "elapsed_time": "15:24:33", "remaining_time": "1:03:23"}
{"current_steps": 3020, "total_steps": 3226, "loss": 0.7161, "learning_rate": 4.094011749501103e-07, "epoch": 0.9361075593785114, "percentage": 93.61, "elapsed_time": "15:24:51", "remaining_time": "1:03:05"}
{"current_steps": 3021, "total_steps": 3226, "loss": 0.6954, "learning_rate": 4.0544950579819443e-07, "epoch": 0.9364175287690341, "percentage": 93.65, "elapsed_time": "15:25:09", "remaining_time": "1:02:46"}
{"current_steps": 3022, "total_steps": 3226, "loss": 0.6963, "learning_rate": 4.015168053292584e-07, "epoch": 0.9367274981595567, "percentage": 93.68, "elapsed_time": "15:25:27", "remaining_time": "1:02:28"}
{"current_steps": 3023, "total_steps": 3226, "loss": 0.7171, "learning_rate": 3.9760307735039027e-07, "epoch": 0.9370374675500794, "percentage": 93.71, "elapsed_time": "15:25:46", "remaining_time": "1:02:10"}
{"current_steps": 3024, "total_steps": 3226, "loss": 0.7076, "learning_rate": 3.9370832565031045e-07, "epoch": 0.9373474369406021, "percentage": 93.74, "elapsed_time": "15:26:04", "remaining_time": "1:01:51"}
{"current_steps": 3025, "total_steps": 3226, "loss": 0.7113, "learning_rate": 3.8983255399936747e-07, "epoch": 0.9376574063311248, "percentage": 93.77, "elapsed_time": "15:26:22", "remaining_time": "1:01:33"}
{"current_steps": 3026, "total_steps": 3226, "loss": 0.7606, "learning_rate": 3.859757661495378e-07, "epoch": 0.9379673757216475, "percentage": 93.8, "elapsed_time": "15:26:41", "remaining_time": "1:01:14"}
{"current_steps": 3027, "total_steps": 3226, "loss": 0.6914, "learning_rate": 3.821379658344215e-07, "epoch": 0.9382773451121702, "percentage": 93.83, "elapsed_time": "15:26:59", "remaining_time": "1:00:56"}
{"current_steps": 3028, "total_steps": 3226, "loss": 0.7106, "learning_rate": 3.7831915676923347e-07, "epoch": 0.9385873145026928, "percentage": 93.86, "elapsed_time": "15:27:17", "remaining_time": "1:00:38"}
{"current_steps": 3029, "total_steps": 3226, "loss": 0.6908, "learning_rate": 3.745193426508076e-07, "epoch": 0.9388972838932156, "percentage": 93.89, "elapsed_time": "15:27:36", "remaining_time": "1:00:19"}
{"current_steps": 3030, "total_steps": 3226, "loss": 0.711, "learning_rate": 3.7073852715758804e-07, "epoch": 0.9392072532837382, "percentage": 93.92, "elapsed_time": "15:27:54", "remaining_time": "1:00:01"}
{"current_steps": 3031, "total_steps": 3226, "loss": 0.7046, "learning_rate": 3.669767139496294e-07, "epoch": 0.939517222674261, "percentage": 93.96, "elapsed_time": "15:28:13", "remaining_time": "0:59:43"}
{"current_steps": 3032, "total_steps": 3226, "loss": 0.7368, "learning_rate": 3.632339066685875e-07, "epoch": 0.9398271920647836, "percentage": 93.99, "elapsed_time": "15:28:31", "remaining_time": "0:59:24"}
{"current_steps": 3033, "total_steps": 3226, "loss": 0.7038, "learning_rate": 3.5951010893772396e-07, "epoch": 0.9401371614553062, "percentage": 94.02, "elapsed_time": "15:28:49", "remaining_time": "0:59:06"}
{"current_steps": 3034, "total_steps": 3226, "loss": 0.6896, "learning_rate": 3.5580532436189084e-07, "epoch": 0.940447130845829, "percentage": 94.05, "elapsed_time": "15:29:08", "remaining_time": "0:58:47"}
{"current_steps": 3035, "total_steps": 3226, "loss": 0.6601, "learning_rate": 3.5211955652753925e-07, "epoch": 0.9407571002363516, "percentage": 94.08, "elapsed_time": "15:29:26", "remaining_time": "0:58:29"}
{"current_steps": 3036, "total_steps": 3226, "loss": 0.6902, "learning_rate": 3.4845280900271506e-07, "epoch": 0.9410670696268744, "percentage": 94.11, "elapsed_time": "15:29:44", "remaining_time": "0:58:11"}
{"current_steps": 3037, "total_steps": 3226, "loss": 0.7137, "learning_rate": 3.4480508533703884e-07, "epoch": 0.941377039017397, "percentage": 94.14, "elapsed_time": "15:30:03", "remaining_time": "0:57:52"}
{"current_steps": 3038, "total_steps": 3226, "loss": 0.7202, "learning_rate": 3.4117638906173035e-07, "epoch": 0.9416870084079197, "percentage": 94.17, "elapsed_time": "15:30:21", "remaining_time": "0:57:34"}
{"current_steps": 3039, "total_steps": 3226, "loss": 0.736, "learning_rate": 3.3756672368957746e-07, "epoch": 0.9419969777984424, "percentage": 94.2, "elapsed_time": "15:30:39", "remaining_time": "0:57:16"}
{"current_steps": 3040, "total_steps": 3226, "loss": 0.6871, "learning_rate": 3.339760927149516e-07, "epoch": 0.9423069471889651, "percentage": 94.23, "elapsed_time": "15:30:58", "remaining_time": "0:56:57"}
{"current_steps": 3041, "total_steps": 3226, "loss": 0.6929, "learning_rate": 3.304044996137967e-07, "epoch": 0.9426169165794878, "percentage": 94.27, "elapsed_time": "15:31:16", "remaining_time": "0:56:39"}
{"current_steps": 3042, "total_steps": 3226, "loss": 0.7061, "learning_rate": 3.2685194784362053e-07, "epoch": 0.9429268859700105, "percentage": 94.3, "elapsed_time": "15:31:34", "remaining_time": "0:56:20"}
{"current_steps": 3043, "total_steps": 3226, "loss": 0.7372, "learning_rate": 3.233184408435075e-07, "epoch": 0.9432368553605331, "percentage": 94.33, "elapsed_time": "15:31:53", "remaining_time": "0:56:02"}
{"current_steps": 3044, "total_steps": 3226, "loss": 0.6959, "learning_rate": 3.198039820341015e-07, "epoch": 0.9435468247510558, "percentage": 94.36, "elapsed_time": "15:32:11", "remaining_time": "0:55:44"}
{"current_steps": 3045, "total_steps": 3226, "loss": 0.6923, "learning_rate": 3.1630857481760535e-07, "epoch": 0.9438567941415785, "percentage": 94.39, "elapsed_time": "15:32:29", "remaining_time": "0:55:25"}
{"current_steps": 3046, "total_steps": 3226, "loss": 0.7208, "learning_rate": 3.128322225777791e-07, "epoch": 0.9441667635321012, "percentage": 94.42, "elapsed_time": "15:32:48", "remaining_time": "0:55:07"}
{"current_steps": 3047, "total_steps": 3226, "loss": 0.6981, "learning_rate": 3.0937492867993966e-07, "epoch": 0.9444767329226239, "percentage": 94.45, "elapsed_time": "15:33:06", "remaining_time": "0:54:48"}
{"current_steps": 3048, "total_steps": 3226, "loss": 0.7187, "learning_rate": 3.059366964709498e-07, "epoch": 0.9447867023131465, "percentage": 94.48, "elapsed_time": "15:33:24", "remaining_time": "0:54:30"}
{"current_steps": 3049, "total_steps": 3226, "loss": 0.7132, "learning_rate": 3.025175292792204e-07, "epoch": 0.9450966717036693, "percentage": 94.51, "elapsed_time": "15:33:43", "remaining_time": "0:54:12"}
{"current_steps": 3050, "total_steps": 3226, "loss": 0.7151, "learning_rate": 2.9911743041471044e-07, "epoch": 0.9454066410941919, "percentage": 94.54, "elapsed_time": "15:34:01", "remaining_time": "0:53:53"}
{"current_steps": 3051, "total_steps": 3226, "loss": 0.7257, "learning_rate": 2.957364031689136e-07, "epoch": 0.9457166104847147, "percentage": 94.58, "elapsed_time": "15:34:19", "remaining_time": "0:53:35"}
{"current_steps": 3052, "total_steps": 3226, "loss": 0.6567, "learning_rate": 2.923744508148696e-07, "epoch": 0.9460265798752373, "percentage": 94.61, "elapsed_time": "15:34:37", "remaining_time": "0:53:17"}
{"current_steps": 3053, "total_steps": 3226, "loss": 0.6948, "learning_rate": 2.8903157660713944e-07, "epoch": 0.94633654926576, "percentage": 94.64, "elapsed_time": "15:34:56", "remaining_time": "0:52:58"}
{"current_steps": 3054, "total_steps": 3226, "loss": 0.7054, "learning_rate": 2.8570778378182786e-07, "epoch": 0.9466465186562827, "percentage": 94.67, "elapsed_time": "15:35:14", "remaining_time": "0:52:40"}
{"current_steps": 3055, "total_steps": 3226, "loss": 0.6921, "learning_rate": 2.8240307555656097e-07, "epoch": 0.9469564880468053, "percentage": 94.7, "elapsed_time": "15:35:32", "remaining_time": "0:52:21"}
{"current_steps": 3056, "total_steps": 3226, "loss": 0.6983, "learning_rate": 2.791174551304887e-07, "epoch": 0.9472664574373281, "percentage": 94.73, "elapsed_time": "15:35:51", "remaining_time": "0:52:03"}
{"current_steps": 3057, "total_steps": 3226, "loss": 0.7151, "learning_rate": 2.758509256842934e-07, "epoch": 0.9475764268278507, "percentage": 94.76, "elapsed_time": "15:36:09", "remaining_time": "0:51:45"}
{"current_steps": 3058, "total_steps": 3226, "loss": 0.6961, "learning_rate": 2.726034903801633e-07, "epoch": 0.9478863962183735, "percentage": 94.79, "elapsed_time": "15:36:27", "remaining_time": "0:51:26"}
{"current_steps": 3059, "total_steps": 3226, "loss": 0.7107, "learning_rate": 2.693751523618104e-07, "epoch": 0.9481963656088961, "percentage": 94.82, "elapsed_time": "15:36:46", "remaining_time": "0:51:08"}
{"current_steps": 3060, "total_steps": 3226, "loss": 0.6637, "learning_rate": 2.661659147544526e-07, "epoch": 0.9485063349994188, "percentage": 94.85, "elapsed_time": "15:37:04", "remaining_time": "0:50:50"}
{"current_steps": 3061, "total_steps": 3226, "loss": 0.7054, "learning_rate": 2.6297578066482254e-07, "epoch": 0.9488163043899415, "percentage": 94.89, "elapsed_time": "15:37:22", "remaining_time": "0:50:31"}
{"current_steps": 3062, "total_steps": 3226, "loss": 0.7103, "learning_rate": 2.598047531811654e-07, "epoch": 0.9491262737804642, "percentage": 94.92, "elapsed_time": "15:37:41", "remaining_time": "0:50:13"}
{"current_steps": 3063, "total_steps": 3226, "loss": 0.7271, "learning_rate": 2.5665283537321227e-07, "epoch": 0.9494362431709868, "percentage": 94.95, "elapsed_time": "15:37:59", "remaining_time": "0:49:54"}
{"current_steps": 3064, "total_steps": 3226, "loss": 0.7002, "learning_rate": 2.5352003029221584e-07, "epoch": 0.9497462125615096, "percentage": 94.98, "elapsed_time": "15:38:17", "remaining_time": "0:49:36"}
{"current_steps": 3065, "total_steps": 3226, "loss": 0.7165, "learning_rate": 2.504063409709101e-07, "epoch": 0.9500561819520322, "percentage": 95.01, "elapsed_time": "15:38:36", "remaining_time": "0:49:18"}
{"current_steps": 3066, "total_steps": 3226, "loss": 0.7133, "learning_rate": 2.473117704235328e-07, "epoch": 0.950366151342555, "percentage": 95.04, "elapsed_time": "15:38:54", "remaining_time": "0:48:59"}
{"current_steps": 3067, "total_steps": 3226, "loss": 0.683, "learning_rate": 2.4423632164581213e-07, "epoch": 0.9506761207330776, "percentage": 95.07, "elapsed_time": "15:39:12", "remaining_time": "0:48:41"}
{"current_steps": 3068, "total_steps": 3226, "loss": 0.6951, "learning_rate": 2.4117999761496205e-07, "epoch": 0.9509860901236002, "percentage": 95.1, "elapsed_time": "15:39:31", "remaining_time": "0:48:23"}
{"current_steps": 3069, "total_steps": 3226, "loss": 0.7046, "learning_rate": 2.381428012896847e-07, "epoch": 0.951296059514123, "percentage": 95.13, "elapsed_time": "15:39:49", "remaining_time": "0:48:04"}
{"current_steps": 3070, "total_steps": 3226, "loss": 0.7193, "learning_rate": 2.3512473561016823e-07, "epoch": 0.9516060289046456, "percentage": 95.16, "elapsed_time": "15:40:07", "remaining_time": "0:47:46"}
{"current_steps": 3071, "total_steps": 3226, "loss": 0.7009, "learning_rate": 2.321258034980778e-07, "epoch": 0.9519159982951684, "percentage": 95.2, "elapsed_time": "15:40:26", "remaining_time": "0:47:27"}
{"current_steps": 3072, "total_steps": 3226, "loss": 0.6846, "learning_rate": 2.291460078565555e-07, "epoch": 0.952225967685691, "percentage": 95.23, "elapsed_time": "15:40:44", "remaining_time": "0:47:09"}
{"current_steps": 3073, "total_steps": 3226, "loss": 0.6806, "learning_rate": 2.2618535157022058e-07, "epoch": 0.9525359370762138, "percentage": 95.26, "elapsed_time": "15:41:02", "remaining_time": "0:46:51"}
{"current_steps": 3074, "total_steps": 3226, "loss": 0.7014, "learning_rate": 2.2324383750516264e-07, "epoch": 0.9528459064667364, "percentage": 95.29, "elapsed_time": "15:41:20", "remaining_time": "0:46:32"}
{"current_steps": 3075, "total_steps": 3226, "loss": 0.7095, "learning_rate": 2.2032146850894166e-07, "epoch": 0.9531558758572591, "percentage": 95.32, "elapsed_time": "15:41:39", "remaining_time": "0:46:14"}
{"current_steps": 3076, "total_steps": 3226, "loss": 0.7234, "learning_rate": 2.174182474105835e-07, "epoch": 0.9534658452477818, "percentage": 95.35, "elapsed_time": "15:41:57", "remaining_time": "0:45:56"}
{"current_steps": 3077, "total_steps": 3226, "loss": 0.7134, "learning_rate": 2.1453417702057556e-07, "epoch": 0.9537758146383045, "percentage": 95.38, "elapsed_time": "15:42:15", "remaining_time": "0:45:37"}
{"current_steps": 3078, "total_steps": 3226, "loss": 0.7013, "learning_rate": 2.116692601308734e-07, "epoch": 0.9540857840288272, "percentage": 95.41, "elapsed_time": "15:42:34", "remaining_time": "0:45:19"}
{"current_steps": 3079, "total_steps": 3226, "loss": 0.6877, "learning_rate": 2.08823499514883e-07, "epoch": 0.9543957534193498, "percentage": 95.44, "elapsed_time": "15:42:52", "remaining_time": "0:45:00"}
{"current_steps": 3080, "total_steps": 3226, "loss": 0.6977, "learning_rate": 2.0599689792746956e-07, "epoch": 0.9547057228098725, "percentage": 95.47, "elapsed_time": "15:43:10", "remaining_time": "0:44:42"}
{"current_steps": 3081, "total_steps": 3226, "loss": 0.7085, "learning_rate": 2.0318945810494873e-07, "epoch": 0.9550156922003952, "percentage": 95.51, "elapsed_time": "15:43:29", "remaining_time": "0:44:24"}
{"current_steps": 3082, "total_steps": 3226, "loss": 0.7151, "learning_rate": 2.0040118276508647e-07, "epoch": 0.9553256615909179, "percentage": 95.54, "elapsed_time": "15:43:47", "remaining_time": "0:44:05"}
{"current_steps": 3083, "total_steps": 3226, "loss": 0.7117, "learning_rate": 1.9763207460710587e-07, "epoch": 0.9556356309814406, "percentage": 95.57, "elapsed_time": "15:44:05", "remaining_time": "0:43:47"}
{"current_steps": 3084, "total_steps": 3226, "loss": 0.6871, "learning_rate": 1.9488213631166043e-07, "epoch": 0.9559456003719633, "percentage": 95.6, "elapsed_time": "15:44:24", "remaining_time": "0:43:29"}
{"current_steps": 3085, "total_steps": 3226, "loss": 0.6976, "learning_rate": 1.921513705408562e-07, "epoch": 0.9562555697624859, "percentage": 95.63, "elapsed_time": "15:44:42", "remaining_time": "0:43:10"}
{"current_steps": 3086, "total_steps": 3226, "loss": 0.6992, "learning_rate": 1.8943977993823193e-07, "epoch": 0.9565655391530087, "percentage": 95.66, "elapsed_time": "15:45:00", "remaining_time": "0:42:52"}
{"current_steps": 3087, "total_steps": 3226, "loss": 0.6999, "learning_rate": 1.8674736712877006e-07, "epoch": 0.9568755085435313, "percentage": 95.69, "elapsed_time": "15:45:19", "remaining_time": "0:42:33"}
{"current_steps": 3088, "total_steps": 3226, "loss": 0.7258, "learning_rate": 1.8407413471889012e-07, "epoch": 0.9571854779340541, "percentage": 95.72, "elapsed_time": "15:45:37", "remaining_time": "0:42:15"}
{"current_steps": 3089, "total_steps": 3226, "loss": 0.6876, "learning_rate": 1.8142008529642875e-07, "epoch": 0.9574954473245767, "percentage": 95.75, "elapsed_time": "15:45:55", "remaining_time": "0:41:57"}
{"current_steps": 3090, "total_steps": 3226, "loss": 0.6999, "learning_rate": 1.787852214306729e-07, "epoch": 0.9578054167150993, "percentage": 95.78, "elapsed_time": "15:46:13", "remaining_time": "0:41:38"}
{"current_steps": 3091, "total_steps": 3226, "loss": 0.6986, "learning_rate": 1.7616954567232003e-07, "epoch": 0.9581153861056221, "percentage": 95.82, "elapsed_time": "15:46:32", "remaining_time": "0:41:20"}
{"current_steps": 3092, "total_steps": 3226, "loss": 0.6794, "learning_rate": 1.735730605535002e-07, "epoch": 0.9584253554961447, "percentage": 95.85, "elapsed_time": "15:46:50", "remaining_time": "0:41:02"}
{"current_steps": 3093, "total_steps": 3226, "loss": 0.7037, "learning_rate": 1.70995768587765e-07, "epoch": 0.9587353248866675, "percentage": 95.88, "elapsed_time": "15:47:08", "remaining_time": "0:40:43"}
{"current_steps": 3094, "total_steps": 3226, "loss": 0.7004, "learning_rate": 1.6843767227008756e-07, "epoch": 0.9590452942771901, "percentage": 95.91, "elapsed_time": "15:47:27", "remaining_time": "0:40:25"}
{"current_steps": 3095, "total_steps": 3226, "loss": 0.6809, "learning_rate": 1.658987740768514e-07, "epoch": 0.9593552636677128, "percentage": 95.94, "elapsed_time": "15:47:45", "remaining_time": "0:40:06"}
{"current_steps": 3096, "total_steps": 3226, "loss": 0.712, "learning_rate": 1.6337907646586381e-07, "epoch": 0.9596652330582355, "percentage": 95.97, "elapsed_time": "15:48:03", "remaining_time": "0:39:48"}
{"current_steps": 3097, "total_steps": 3226, "loss": 0.7126, "learning_rate": 1.6087858187634252e-07, "epoch": 0.9599752024487582, "percentage": 96.0, "elapsed_time": "15:48:22", "remaining_time": "0:39:30"}
{"current_steps": 3098, "total_steps": 3226, "loss": 0.736, "learning_rate": 1.5839729272890903e-07, "epoch": 0.9602851718392809, "percentage": 96.03, "elapsed_time": "15:48:40", "remaining_time": "0:39:11"}
{"current_steps": 3099, "total_steps": 3226, "loss": 0.71, "learning_rate": 1.5593521142559964e-07, "epoch": 0.9605951412298036, "percentage": 96.06, "elapsed_time": "15:48:58", "remaining_time": "0:38:53"}
{"current_steps": 3100, "total_steps": 3226, "loss": 0.7082, "learning_rate": 1.534923403498567e-07, "epoch": 0.9609051106203262, "percentage": 96.09, "elapsed_time": "15:49:17", "remaining_time": "0:38:35"}
{"current_steps": 3101, "total_steps": 3226, "loss": 0.7086, "learning_rate": 1.5106868186652412e-07, "epoch": 0.9612150800108489, "percentage": 96.13, "elapsed_time": "15:49:35", "remaining_time": "0:38:16"}
{"current_steps": 3102, "total_steps": 3226, "loss": 0.6972, "learning_rate": 1.4866423832184285e-07, "epoch": 0.9615250494013716, "percentage": 96.16, "elapsed_time": "15:49:54", "remaining_time": "0:37:58"}
{"current_steps": 3103, "total_steps": 3226, "loss": 0.7005, "learning_rate": 1.4627901204345763e-07, "epoch": 0.9618350187918943, "percentage": 96.19, "elapsed_time": "15:50:12", "remaining_time": "0:37:39"}
{"current_steps": 3104, "total_steps": 3226, "loss": 0.7239, "learning_rate": 1.439130053404103e-07, "epoch": 0.962144988182417, "percentage": 96.22, "elapsed_time": "15:50:30", "remaining_time": "0:37:21"}
{"current_steps": 3105, "total_steps": 3226, "loss": 0.6995, "learning_rate": 1.4156622050313317e-07, "epoch": 0.9624549575729396, "percentage": 96.25, "elapsed_time": "15:50:49", "remaining_time": "0:37:03"}
{"current_steps": 3106, "total_steps": 3226, "loss": 0.7269, "learning_rate": 1.3923865980345564e-07, "epoch": 0.9627649269634624, "percentage": 96.28, "elapsed_time": "15:51:07", "remaining_time": "0:36:44"}
{"current_steps": 3107, "total_steps": 3226, "loss": 0.6723, "learning_rate": 1.3693032549459306e-07, "epoch": 0.963074896353985, "percentage": 96.31, "elapsed_time": "15:51:25", "remaining_time": "0:36:26"}
{"current_steps": 3108, "total_steps": 3226, "loss": 0.7352, "learning_rate": 1.3464121981114463e-07, "epoch": 0.9633848657445078, "percentage": 96.34, "elapsed_time": "15:51:44", "remaining_time": "0:36:08"}
{"current_steps": 3109, "total_steps": 3226, "loss": 0.6781, "learning_rate": 1.3237134496910664e-07, "epoch": 0.9636948351350304, "percentage": 96.37, "elapsed_time": "15:52:02", "remaining_time": "0:35:49"}
{"current_steps": 3110, "total_steps": 3226, "loss": 0.6994, "learning_rate": 1.301207031658458e-07, "epoch": 0.9640048045255531, "percentage": 96.4, "elapsed_time": "15:52:20", "remaining_time": "0:35:31"}
{"current_steps": 3111, "total_steps": 3226, "loss": 0.7152, "learning_rate": 1.278892965801237e-07, "epoch": 0.9643147739160758, "percentage": 96.44, "elapsed_time": "15:52:39", "remaining_time": "0:35:12"}
{"current_steps": 3112, "total_steps": 3226, "loss": 0.6874, "learning_rate": 1.2567712737206804e-07, "epoch": 0.9646247433065984, "percentage": 96.47, "elapsed_time": "15:52:57", "remaining_time": "0:34:54"}
{"current_steps": 3113, "total_steps": 3226, "loss": 0.6789, "learning_rate": 1.234841976831902e-07, "epoch": 0.9649347126971212, "percentage": 96.5, "elapsed_time": "15:53:15", "remaining_time": "0:34:36"}
{"current_steps": 3114, "total_steps": 3226, "loss": 0.7204, "learning_rate": 1.2131050963638092e-07, "epoch": 0.9652446820876438, "percentage": 96.53, "elapsed_time": "15:53:34", "remaining_time": "0:34:17"}
{"current_steps": 3115, "total_steps": 3226, "loss": 0.6623, "learning_rate": 1.191560653358903e-07, "epoch": 0.9655546514781665, "percentage": 96.56, "elapsed_time": "15:53:52", "remaining_time": "0:33:59"}
{"current_steps": 3116, "total_steps": 3226, "loss": 0.7159, "learning_rate": 1.1702086686735448e-07, "epoch": 0.9658646208686892, "percentage": 96.59, "elapsed_time": "15:54:10", "remaining_time": "0:33:41"}
{"current_steps": 3117, "total_steps": 3226, "loss": 0.6749, "learning_rate": 1.1490491629776667e-07, "epoch": 0.9661745902592119, "percentage": 96.62, "elapsed_time": "15:54:29", "remaining_time": "0:33:22"}
{"current_steps": 3118, "total_steps": 3226, "loss": 0.6934, "learning_rate": 1.1280821567549505e-07, "epoch": 0.9664845596497346, "percentage": 96.65, "elapsed_time": "15:54:47", "remaining_time": "0:33:04"}
{"current_steps": 3119, "total_steps": 3226, "loss": 0.7114, "learning_rate": 1.1073076703027153e-07, "epoch": 0.9667945290402573, "percentage": 96.68, "elapsed_time": "15:55:05", "remaining_time": "0:32:45"}
{"current_steps": 3120, "total_steps": 3226, "loss": 0.7442, "learning_rate": 1.0867257237318519e-07, "epoch": 0.96710449843078, "percentage": 96.71, "elapsed_time": "15:55:24", "remaining_time": "0:32:27"}
{"current_steps": 3121, "total_steps": 3226, "loss": 0.6732, "learning_rate": 1.0663363369669333e-07, "epoch": 0.9674144678213027, "percentage": 96.75, "elapsed_time": "15:55:42", "remaining_time": "0:32:09"}
{"current_steps": 3122, "total_steps": 3226, "loss": 0.7277, "learning_rate": 1.0461395297460597e-07, "epoch": 0.9677244372118253, "percentage": 96.78, "elapsed_time": "15:56:00", "remaining_time": "0:31:50"}
{"current_steps": 3123, "total_steps": 3226, "loss": 0.7067, "learning_rate": 1.0261353216209691e-07, "epoch": 0.968034406602348, "percentage": 96.81, "elapsed_time": "15:56:19", "remaining_time": "0:31:32"}
{"current_steps": 3124, "total_steps": 3226, "loss": 0.7166, "learning_rate": 1.0063237319569042e-07, "epoch": 0.9683443759928707, "percentage": 96.84, "elapsed_time": "15:56:37", "remaining_time": "0:31:14"}
{"current_steps": 3125, "total_steps": 3226, "loss": 0.6971, "learning_rate": 9.867047799326346e-08, "epoch": 0.9686543453833933, "percentage": 96.87, "elapsed_time": "15:56:55", "remaining_time": "0:30:55"}
{"current_steps": 3126, "total_steps": 3226, "loss": 0.7031, "learning_rate": 9.672784845404792e-08, "epoch": 0.9689643147739161, "percentage": 96.9, "elapsed_time": "15:57:14", "remaining_time": "0:30:37"}
{"current_steps": 3127, "total_steps": 3226, "loss": 0.7146, "learning_rate": 9.480448645862617e-08, "epoch": 0.9692742841644387, "percentage": 96.93, "elapsed_time": "15:57:32", "remaining_time": "0:30:18"}
{"current_steps": 3128, "total_steps": 3226, "loss": 0.694, "learning_rate": 9.290039386892213e-08, "epoch": 0.9695842535549615, "percentage": 96.96, "elapsed_time": "15:57:50", "remaining_time": "0:30:00"}
{"current_steps": 3129, "total_steps": 3226, "loss": 0.7336, "learning_rate": 9.101557252821247e-08, "epoch": 0.9698942229454841, "percentage": 96.99, "elapsed_time": "15:58:09", "remaining_time": "0:29:42"}
{"current_steps": 3130, "total_steps": 3226, "loss": 0.7032, "learning_rate": 8.915002426111763e-08, "epoch": 0.9702041923360069, "percentage": 97.02, "elapsed_time": "15:58:27", "remaining_time": "0:29:23"}
{"current_steps": 3131, "total_steps": 3226, "loss": 0.6967, "learning_rate": 8.7303750873593e-08, "epoch": 0.9705141617265295, "percentage": 97.06, "elapsed_time": "15:58:45", "remaining_time": "0:29:05"}
{"current_steps": 3132, "total_steps": 3226, "loss": 0.7187, "learning_rate": 8.547675415294665e-08, "epoch": 0.9708241311170522, "percentage": 97.09, "elapsed_time": "15:59:03", "remaining_time": "0:28:47"}
{"current_steps": 3133, "total_steps": 3226, "loss": 0.7121, "learning_rate": 8.366903586781494e-08, "epoch": 0.9711341005075749, "percentage": 97.12, "elapsed_time": "15:59:22", "remaining_time": "0:28:28"}
{"current_steps": 3134, "total_steps": 3226, "loss": 0.7123, "learning_rate": 8.188059776817803e-08, "epoch": 0.9714440698980976, "percentage": 97.15, "elapsed_time": "15:59:40", "remaining_time": "0:28:10"}
{"current_steps": 3135, "total_steps": 3226, "loss": 0.6784, "learning_rate": 8.011144158534878e-08, "epoch": 0.9717540392886203, "percentage": 97.18, "elapsed_time": "15:59:58", "remaining_time": "0:27:51"}
{"current_steps": 3136, "total_steps": 3226, "loss": 0.7161, "learning_rate": 7.836156903197279e-08, "epoch": 0.9720640086791429, "percentage": 97.21, "elapsed_time": "16:00:17", "remaining_time": "0:27:33"}
{"current_steps": 3137, "total_steps": 3226, "loss": 0.6918, "learning_rate": 7.663098180203721e-08, "epoch": 0.9723739780696656, "percentage": 97.24, "elapsed_time": "16:00:35", "remaining_time": "0:27:15"}
{"current_steps": 3138, "total_steps": 3226, "loss": 0.7045, "learning_rate": 7.491968157084418e-08, "epoch": 0.9726839474601883, "percentage": 97.27, "elapsed_time": "16:00:53", "remaining_time": "0:26:56"}
{"current_steps": 3139, "total_steps": 3226, "loss": 0.7036, "learning_rate": 7.322766999503961e-08, "epoch": 0.972993916850711, "percentage": 97.3, "elapsed_time": "16:01:12", "remaining_time": "0:26:38"}
{"current_steps": 3140, "total_steps": 3226, "loss": 0.7342, "learning_rate": 7.155494871258884e-08, "epoch": 0.9733038862412337, "percentage": 97.33, "elapsed_time": "16:01:30", "remaining_time": "0:26:20"}
{"current_steps": 3141, "total_steps": 3226, "loss": 0.7021, "learning_rate": 6.990151934278322e-08, "epoch": 0.9736138556317564, "percentage": 97.37, "elapsed_time": "16:01:48", "remaining_time": "0:26:01"}
{"current_steps": 3142, "total_steps": 3226, "loss": 0.6894, "learning_rate": 6.82673834862424e-08, "epoch": 0.973923825022279, "percentage": 97.4, "elapsed_time": "16:02:06", "remaining_time": "0:25:43"}
{"current_steps": 3143, "total_steps": 3226, "loss": 0.6889, "learning_rate": 6.66525427249054e-08, "epoch": 0.9742337944128018, "percentage": 97.43, "elapsed_time": "16:02:25", "remaining_time": "0:25:24"}
{"current_steps": 3144, "total_steps": 3226, "loss": 0.7497, "learning_rate": 6.505699862203285e-08, "epoch": 0.9745437638033244, "percentage": 97.46, "elapsed_time": "16:02:43", "remaining_time": "0:25:06"}
{"current_steps": 3145, "total_steps": 3226, "loss": 0.7234, "learning_rate": 6.348075272220922e-08, "epoch": 0.9748537331938472, "percentage": 97.49, "elapsed_time": "16:03:02", "remaining_time": "0:24:48"}
{"current_steps": 3146, "total_steps": 3226, "loss": 0.7021, "learning_rate": 6.192380655132946e-08, "epoch": 0.9751637025843698, "percentage": 97.52, "elapsed_time": "16:03:20", "remaining_time": "0:24:29"}
{"current_steps": 3147, "total_steps": 3226, "loss": 0.7178, "learning_rate": 6.038616161661015e-08, "epoch": 0.9754736719748924, "percentage": 97.55, "elapsed_time": "16:03:38", "remaining_time": "0:24:11"}
{"current_steps": 3148, "total_steps": 3226, "loss": 0.6792, "learning_rate": 5.886781940658504e-08, "epoch": 0.9757836413654152, "percentage": 97.58, "elapsed_time": "16:03:57", "remaining_time": "0:23:53"}
{"current_steps": 3149, "total_steps": 3226, "loss": 0.6874, "learning_rate": 5.736878139109614e-08, "epoch": 0.9760936107559378, "percentage": 97.61, "elapsed_time": "16:04:15", "remaining_time": "0:23:34"}
{"current_steps": 3150, "total_steps": 3226, "loss": 0.681, "learning_rate": 5.588904902130266e-08, "epoch": 0.9764035801464606, "percentage": 97.64, "elapsed_time": "16:04:33", "remaining_time": "0:23:16"}
{"current_steps": 3151, "total_steps": 3226, "loss": 0.7193, "learning_rate": 5.442862372967428e-08, "epoch": 0.9767135495369832, "percentage": 97.68, "elapsed_time": "16:04:51", "remaining_time": "0:22:57"}
{"current_steps": 3152, "total_steps": 3226, "loss": 0.7194, "learning_rate": 5.298750692998456e-08, "epoch": 0.9770235189275059, "percentage": 97.71, "elapsed_time": "16:05:10", "remaining_time": "0:22:39"}
{"current_steps": 3153, "total_steps": 3226, "loss": 0.7046, "learning_rate": 5.1565700017324196e-08, "epoch": 0.9773334883180286, "percentage": 97.74, "elapsed_time": "16:05:28", "remaining_time": "0:22:21"}
{"current_steps": 3154, "total_steps": 3226, "loss": 0.7372, "learning_rate": 5.0163204368083305e-08, "epoch": 0.9776434577085513, "percentage": 97.77, "elapsed_time": "16:05:46", "remaining_time": "0:22:02"}
{"current_steps": 3155, "total_steps": 3226, "loss": 0.7039, "learning_rate": 4.878002133996251e-08, "epoch": 0.977953427099074, "percentage": 97.8, "elapsed_time": "16:06:05", "remaining_time": "0:21:44"}
{"current_steps": 3156, "total_steps": 3226, "loss": 0.7029, "learning_rate": 4.741615227196627e-08, "epoch": 0.9782633964895967, "percentage": 97.83, "elapsed_time": "16:06:23", "remaining_time": "0:21:26"}
{"current_steps": 3157, "total_steps": 3226, "loss": 0.6822, "learning_rate": 4.607159848439402e-08, "epoch": 0.9785733658801193, "percentage": 97.86, "elapsed_time": "16:06:41", "remaining_time": "0:21:07"}
{"current_steps": 3158, "total_steps": 3226, "loss": 0.707, "learning_rate": 4.4746361278860116e-08, "epoch": 0.978883335270642, "percentage": 97.89, "elapsed_time": "16:07:00", "remaining_time": "0:20:49"}
{"current_steps": 3159, "total_steps": 3226, "loss": 0.6831, "learning_rate": 4.344044193826946e-08, "epoch": 0.9791933046611647, "percentage": 97.92, "elapsed_time": "16:07:18", "remaining_time": "0:20:30"}
{"current_steps": 3160, "total_steps": 3226, "loss": 0.6972, "learning_rate": 4.215384172683079e-08, "epoch": 0.9795032740516874, "percentage": 97.95, "elapsed_time": "16:07:36", "remaining_time": "0:20:12"}
{"current_steps": 3161, "total_steps": 3226, "loss": 0.6802, "learning_rate": 4.088656189004558e-08, "epoch": 0.9798132434422101, "percentage": 97.99, "elapsed_time": "16:07:55", "remaining_time": "0:19:54"}
{"current_steps": 3162, "total_steps": 3226, "loss": 0.6864, "learning_rate": 3.9638603654719163e-08, "epoch": 0.9801232128327327, "percentage": 98.02, "elapsed_time": "16:08:13", "remaining_time": "0:19:35"}
{"current_steps": 3163, "total_steps": 3226, "loss": 0.6835, "learning_rate": 3.840996822894738e-08, "epoch": 0.9804331822232555, "percentage": 98.05, "elapsed_time": "16:08:31", "remaining_time": "0:19:17"}
{"current_steps": 3164, "total_steps": 3226, "loss": 0.7106, "learning_rate": 3.720065680212326e-08, "epoch": 0.9807431516137781, "percentage": 98.08, "elapsed_time": "16:08:50", "remaining_time": "0:18:59"}
{"current_steps": 3165, "total_steps": 3226, "loss": 0.7048, "learning_rate": 3.6010670544930346e-08, "epoch": 0.9810531210043009, "percentage": 98.11, "elapsed_time": "16:09:08", "remaining_time": "0:18:40"}
{"current_steps": 3166, "total_steps": 3226, "loss": 0.671, "learning_rate": 3.4840010609344944e-08, "epoch": 0.9813630903948235, "percentage": 98.14, "elapsed_time": "16:09:26", "remaining_time": "0:18:22"}
{"current_steps": 3167, "total_steps": 3226, "loss": 0.6922, "learning_rate": 3.36886781286383e-08, "epoch": 0.9816730597853462, "percentage": 98.17, "elapsed_time": "16:09:45", "remaining_time": "0:18:03"}
{"current_steps": 3168, "total_steps": 3226, "loss": 0.7713, "learning_rate": 3.255667421736552e-08, "epoch": 0.9819830291758689, "percentage": 98.2, "elapsed_time": "16:10:03", "remaining_time": "0:17:45"}
{"current_steps": 3169, "total_steps": 3226, "loss": 0.7392, "learning_rate": 3.1443999971372265e-08, "epoch": 0.9822929985663915, "percentage": 98.23, "elapsed_time": "16:10:21", "remaining_time": "0:17:27"}
{"current_steps": 3170, "total_steps": 3226, "loss": 0.704, "learning_rate": 3.035065646779467e-08, "epoch": 0.9826029679569143, "percentage": 98.26, "elapsed_time": "16:10:40", "remaining_time": "0:17:08"}
{"current_steps": 3171, "total_steps": 3226, "loss": 0.6916, "learning_rate": 2.9276644765054985e-08, "epoch": 0.9829129373474369, "percentage": 98.3, "elapsed_time": "16:10:58", "remaining_time": "0:16:50"}
{"current_steps": 3172, "total_steps": 3226, "loss": 0.7244, "learning_rate": 2.8221965902859306e-08, "epoch": 0.9832229067379596, "percentage": 98.33, "elapsed_time": "16:11:16", "remaining_time": "0:16:32"}
{"current_steps": 3173, "total_steps": 3226, "loss": 0.6628, "learning_rate": 2.718662090219759e-08, "epoch": 0.9835328761284823, "percentage": 98.36, "elapsed_time": "16:11:35", "remaining_time": "0:16:13"}
{"current_steps": 3174, "total_steps": 3226, "loss": 0.7195, "learning_rate": 2.6170610765348102e-08, "epoch": 0.983842845519005, "percentage": 98.39, "elapsed_time": "16:11:53", "remaining_time": "0:15:55"}
{"current_steps": 3175, "total_steps": 3226, "loss": 0.7191, "learning_rate": 2.517393647586408e-08, "epoch": 0.9841528149095277, "percentage": 98.42, "elapsed_time": "16:12:11", "remaining_time": "0:15:36"}
{"current_steps": 3176, "total_steps": 3226, "loss": 0.7293, "learning_rate": 2.4196598998589283e-08, "epoch": 0.9844627843000504, "percentage": 98.45, "elapsed_time": "16:12:30", "remaining_time": "0:15:18"}
{"current_steps": 3177, "total_steps": 3226, "loss": 0.7012, "learning_rate": 2.323859927964245e-08, "epoch": 0.984772753690573, "percentage": 98.48, "elapsed_time": "16:12:48", "remaining_time": "0:15:00"}
{"current_steps": 3178, "total_steps": 3226, "loss": 0.703, "learning_rate": 2.2299938246423958e-08, "epoch": 0.9850827230810958, "percentage": 98.51, "elapsed_time": "16:13:06", "remaining_time": "0:14:41"}
{"current_steps": 3179, "total_steps": 3226, "loss": 0.7021, "learning_rate": 2.1380616807613607e-08, "epoch": 0.9853926924716184, "percentage": 98.54, "elapsed_time": "16:13:25", "remaining_time": "0:14:23"}
{"current_steps": 3180, "total_steps": 3226, "loss": 0.701, "learning_rate": 2.0480635853168397e-08, "epoch": 0.9857026618621411, "percentage": 98.57, "elapsed_time": "16:13:43", "remaining_time": "0:14:05"}
{"current_steps": 3181, "total_steps": 3226, "loss": 0.6998, "learning_rate": 1.9599996254322518e-08, "epoch": 0.9860126312526638, "percentage": 98.61, "elapsed_time": "16:14:01", "remaining_time": "0:13:46"}
{"current_steps": 3182, "total_steps": 3226, "loss": 0.6802, "learning_rate": 1.873869886358959e-08, "epoch": 0.9863226006431864, "percentage": 98.64, "elapsed_time": "16:14:20", "remaining_time": "0:13:28"}
{"current_steps": 3183, "total_steps": 3226, "loss": 0.7001, "learning_rate": 1.789674451475154e-08, "epoch": 0.9866325700337092, "percentage": 98.67, "elapsed_time": "16:14:38", "remaining_time": "0:13:10"}
{"current_steps": 3184, "total_steps": 3226, "loss": 0.6938, "learning_rate": 1.707413402287639e-08, "epoch": 0.9869425394242318, "percentage": 98.7, "elapsed_time": "16:14:56", "remaining_time": "0:12:51"}
{"current_steps": 3185, "total_steps": 3226, "loss": 0.7049, "learning_rate": 1.6270868184296017e-08, "epoch": 0.9872525088147546, "percentage": 98.73, "elapsed_time": "16:15:15", "remaining_time": "0:12:33"}
{"current_steps": 3186, "total_steps": 3226, "loss": 0.71, "learning_rate": 1.54869477766173e-08, "epoch": 0.9875624782052772, "percentage": 98.76, "elapsed_time": "16:15:33", "remaining_time": "0:12:14"}
{"current_steps": 3187, "total_steps": 3226, "loss": 0.6643, "learning_rate": 1.472237355872652e-08, "epoch": 0.9878724475958, "percentage": 98.79, "elapsed_time": "16:15:51", "remaining_time": "0:11:56"}
{"current_steps": 3188, "total_steps": 3226, "loss": 0.7341, "learning_rate": 1.3977146270771625e-08, "epoch": 0.9881824169863226, "percentage": 98.82, "elapsed_time": "16:16:10", "remaining_time": "0:11:38"}
{"current_steps": 3189, "total_steps": 3226, "loss": 0.693, "learning_rate": 1.3251266634182191e-08, "epoch": 0.9884923863768453, "percentage": 98.85, "elapsed_time": "16:16:28", "remaining_time": "0:11:19"}
{"current_steps": 3190, "total_steps": 3226, "loss": 0.6971, "learning_rate": 1.2544735351647241e-08, "epoch": 0.988802355767368, "percentage": 98.88, "elapsed_time": "16:16:46", "remaining_time": "0:11:01"}
{"current_steps": 3191, "total_steps": 3226, "loss": 0.7193, "learning_rate": 1.1857553107132991e-08, "epoch": 0.9891123251578906, "percentage": 98.92, "elapsed_time": "16:17:04", "remaining_time": "0:10:43"}
{"current_steps": 3192, "total_steps": 3226, "loss": 0.7173, "learning_rate": 1.1189720565873974e-08, "epoch": 0.9894222945484134, "percentage": 98.95, "elapsed_time": "16:17:23", "remaining_time": "0:10:24"}
{"current_steps": 3193, "total_steps": 3226, "loss": 0.7205, "learning_rate": 1.05412383743686e-08, "epoch": 0.989732263938936, "percentage": 98.98, "elapsed_time": "16:17:41", "remaining_time": "0:10:06"}
{"current_steps": 3194, "total_steps": 3226, "loss": 0.7329, "learning_rate": 9.91210716038804e-09, "epoch": 0.9900422333294587, "percentage": 99.01, "elapsed_time": "16:17:59", "remaining_time": "0:09:47"}
{"current_steps": 3195, "total_steps": 3226, "loss": 0.7058, "learning_rate": 9.302327532969558e-09, "epoch": 0.9903522027199814, "percentage": 99.04, "elapsed_time": "16:18:18", "remaining_time": "0:09:29"}
{"current_steps": 3196, "total_steps": 3226, "loss": 0.691, "learning_rate": 8.711900082412072e-09, "epoch": 0.9906621721105041, "percentage": 99.07, "elapsed_time": "16:18:36", "remaining_time": "0:09:11"}
{"current_steps": 3197, "total_steps": 3226, "loss": 0.6721, "learning_rate": 8.140825380287266e-09, "epoch": 0.9909721415010267, "percentage": 99.1, "elapsed_time": "16:18:54", "remaining_time": "0:08:52"}
{"current_steps": 3198, "total_steps": 3226, "loss": 0.7323, "learning_rate": 7.58910397942847e-09, "epoch": 0.9912821108915495, "percentage": 99.13, "elapsed_time": "16:19:13", "remaining_time": "0:08:34"}
{"current_steps": 3199, "total_steps": 3226, "loss": 0.7044, "learning_rate": 7.056736413935117e-09, "epoch": 0.9915920802820721, "percentage": 99.16, "elapsed_time": "16:19:31", "remaining_time": "0:08:16"}
{"current_steps": 3200, "total_steps": 3226, "loss": 0.6924, "learning_rate": 6.543723199170515e-09, "epoch": 0.9919020496725949, "percentage": 99.19, "elapsed_time": "16:19:49", "remaining_time": "0:07:57"}
{"current_steps": 3201, "total_steps": 3226, "loss": 0.7135, "learning_rate": 6.050064831759628e-09, "epoch": 0.9922120190631175, "percentage": 99.23, "elapsed_time": "16:20:08", "remaining_time": "0:07:39"}
{"current_steps": 3202, "total_steps": 3226, "loss": 0.702, "learning_rate": 5.5757617895935144e-09, "epoch": 0.9925219884536403, "percentage": 99.26, "elapsed_time": "16:20:26", "remaining_time": "0:07:20"}
{"current_steps": 3203, "total_steps": 3226, "loss": 0.6885, "learning_rate": 5.120814531829332e-09, "epoch": 0.9928319578441629, "percentage": 99.29, "elapsed_time": "16:20:44", "remaining_time": "0:07:02"}
{"current_steps": 3204, "total_steps": 3226, "loss": 0.7112, "learning_rate": 4.685223498877012e-09, "epoch": 0.9931419272346855, "percentage": 99.32, "elapsed_time": "16:21:03", "remaining_time": "0:06:44"}
{"current_steps": 3205, "total_steps": 3226, "loss": 0.7447, "learning_rate": 4.268989112419242e-09, "epoch": 0.9934518966252083, "percentage": 99.35, "elapsed_time": "16:21:21", "remaining_time": "0:06:25"}
{"current_steps": 3206, "total_steps": 3226, "loss": 0.6935, "learning_rate": 3.872111775393705e-09, "epoch": 0.9937618660157309, "percentage": 99.38, "elapsed_time": "16:21:39", "remaining_time": "0:06:07"}
{"current_steps": 3207, "total_steps": 3226, "loss": 0.6795, "learning_rate": 3.4945918720019622e-09, "epoch": 0.9940718354062537, "percentage": 99.41, "elapsed_time": "16:21:58", "remaining_time": "0:05:49"}
{"current_steps": 3208, "total_steps": 3226, "loss": 0.6747, "learning_rate": 3.136429767705007e-09, "epoch": 0.9943818047967763, "percentage": 99.44, "elapsed_time": "16:22:16", "remaining_time": "0:05:30"}
{"current_steps": 3209, "total_steps": 3226, "loss": 0.7122, "learning_rate": 2.79762580922327e-09, "epoch": 0.994691774187299, "percentage": 99.47, "elapsed_time": "16:22:34", "remaining_time": "0:05:12"}
{"current_steps": 3210, "total_steps": 3226, "loss": 0.6877, "learning_rate": 2.4781803245410574e-09, "epoch": 0.9950017435778217, "percentage": 99.5, "elapsed_time": "16:22:52", "remaining_time": "0:04:53"}
{"current_steps": 3211, "total_steps": 3226, "loss": 0.6964, "learning_rate": 2.1780936228998904e-09, "epoch": 0.9953117129683444, "percentage": 99.54, "elapsed_time": "16:23:11", "remaining_time": "0:04:35"}
{"current_steps": 3212, "total_steps": 3226, "loss": 0.71, "learning_rate": 1.897365994800726e-09, "epoch": 0.995621682358867, "percentage": 99.57, "elapsed_time": "16:23:29", "remaining_time": "0:04:17"}
{"current_steps": 3213, "total_steps": 3226, "loss": 0.7095, "learning_rate": 1.6359977120061765e-09, "epoch": 0.9959316517493898, "percentage": 99.6, "elapsed_time": "16:23:47", "remaining_time": "0:03:58"}
{"current_steps": 3214, "total_steps": 3226, "loss": 0.6801, "learning_rate": 1.3939890275338486e-09, "epoch": 0.9962416211399124, "percentage": 99.63, "elapsed_time": "16:24:06", "remaining_time": "0:03:40"}
{"current_steps": 3215, "total_steps": 3226, "loss": 0.7088, "learning_rate": 1.1713401756652253e-09, "epoch": 0.9965515905304351, "percentage": 99.66, "elapsed_time": "16:24:24", "remaining_time": "0:03:22"}
{"current_steps": 3216, "total_steps": 3226, "loss": 0.7, "learning_rate": 9.680513719345642e-10, "epoch": 0.9968615599209578, "percentage": 99.69, "elapsed_time": "16:24:42", "remaining_time": "0:03:03"}
{"current_steps": 3217, "total_steps": 3226, "loss": 0.7342, "learning_rate": 7.841228131399981e-10, "epoch": 0.9971715293114805, "percentage": 99.72, "elapsed_time": "16:25:01", "remaining_time": "0:02:45"}
{"current_steps": 3218, "total_steps": 3226, "loss": 0.6986, "learning_rate": 6.19554677334655e-10, "epoch": 0.9974814987020032, "percentage": 99.75, "elapsed_time": "16:25:19", "remaining_time": "0:02:26"}
{"current_steps": 3219, "total_steps": 3226, "loss": 0.71, "learning_rate": 4.743471238288777e-10, "epoch": 0.9977914680925258, "percentage": 99.78, "elapsed_time": "16:25:37", "remaining_time": "0:02:08"}
{"current_steps": 3220, "total_steps": 3226, "loss": 0.7046, "learning_rate": 3.485002931946646e-10, "epoch": 0.9981014374830486, "percentage": 99.81, "elapsed_time": "16:25:56", "remaining_time": "0:01:50"}
{"current_steps": 3221, "total_steps": 3226, "loss": 0.708, "learning_rate": 2.420143072567882e-10, "epoch": 0.9984114068735712, "percentage": 99.85, "elapsed_time": "16:26:14", "remaining_time": "0:01:31"}
{"current_steps": 3222, "total_steps": 3226, "loss": 0.707, "learning_rate": 1.548892691016768e-10, "epoch": 0.998721376264094, "percentage": 99.88, "elapsed_time": "16:26:32", "remaining_time": "0:01:13"}
{"current_steps": 3223, "total_steps": 3226, "loss": 0.7189, "learning_rate": 8.712526306853264e-11, "epoch": 0.9990313456546166, "percentage": 99.91, "elapsed_time": "16:26:51", "remaining_time": "0:00:55"}
{"current_steps": 3224, "total_steps": 3226, "loss": 0.7071, "learning_rate": 3.872235476043429e-11, "epoch": 0.9993413150451393, "percentage": 99.94, "elapsed_time": "16:27:09", "remaining_time": "0:00:36"}
{"current_steps": 3225, "total_steps": 3226, "loss": 0.6814, "learning_rate": 9.680591033234265e-12, "epoch": 0.999651284435662, "percentage": 99.97, "elapsed_time": "16:27:27", "remaining_time": "0:00:18"}
{"current_steps": 3226, "total_steps": 3226, "loss": 0.7025, "learning_rate": 0.0, "epoch": 0.9999612538261846, "percentage": 100.0, "elapsed_time": "16:27:46", "remaining_time": "0:00:00"}
{"current_steps": 3226, "total_steps": 3226, "epoch": 0.9999612538261846, "percentage": 100.0, "elapsed_time": "16:28:09", "remaining_time": "0:00:00"}
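
Each appended record is a single JSON object with the step counters, loss, learning rate, epoch fraction, and timing fields, so the log is easy to post-process; note that the very last record (written once step 3226 completes) carries no loss. A minimal sketch, assuming a local copy of trainer_log.jsonl, that summarizes the tail of the run:

# Sketch: summarize the end of trainer_log.jsonl (one JSON object per line).
import json

with open("trainer_log.jsonl", "r", encoding="utf-8") as f:
    entries = [json.loads(line) for line in f if line.strip()]

# Keep only rows that report a loss (the final summary row does not).
logged = [e for e in entries if "loss" in e]
last = logged[-1]
tail = logged[-100:]
mean_loss = sum(e["loss"] for e in tail) / len(tail)

print(f"step {last['current_steps']}/{last['total_steps']}  loss={last['loss']}")
print(f"mean loss over last {len(tail)} logged steps: {mean_loss:.4f}")
print(f"final learning rate: {last['learning_rate']}")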