elsayedissa committed • Commit 69113d2 • 1 Parent(s): 7fb866c
Training in progress, step 13000

Browse files:
- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/pytorch_model.bin +1 -1
- last-checkpoint/rng_state.pth +2 -2
- last-checkpoint/scaler.pt +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +252 -3
- pytorch_model.bin +1 -1
- runs/Dec26_19-57-19_gpu07.cyverse.org/events.out.tfevents.1672109970.gpu07.cyverse.org.126369.0 +2 -2
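The files listed above are the standard artifacts a Hugging Face transformers Seq2SeqTrainer saves and pushes at a mid-run checkpoint (optimizer, model weights, RNG state, gradient scaler, LR scheduler, and the trainer state). For orientation only, here is a minimal sketch of a training configuration that would produce commits like this one; every value is an assumption inferred from trainer_state.json (max_steps 25000, evaluations at 1000-step intervals, WER as the selection metric) and is not the author's actual script.

```python
# Hypothetical sketch of a Whisper fine-tuning setup that pushes checkpoint
# commits like this one. All values are assumptions read off trainer_state.json.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="whisper-large-v2-spanish",  # assumed from the checkpoint path in trainer_state.json
    max_steps=25000,                        # matches "max_steps" in trainer_state.json
    evaluation_strategy="steps",
    eval_steps=1000,                        # assumed: eval entries appear at 1000-step intervals
    save_steps=1000,                        # assumed: checkpoints are saved at the same cadence
    metric_for_best_model="wer",            # assumed: "best_metric" tracks eval_wer
    greater_is_better=False,
    push_to_hub=True,                       # each save produces a "Training in progress" commit
)
```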
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9b82e023bcdce3d9867de00994de7448663053d12658724461977f225ef150ba
 size 12347192855
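Each of these binaries is stored through Git LFS, so the repository only tracks a three-line pointer (version, oid sha256, size); the diff above swaps in the new checkpoint's hash. The old hashes are truncated on this page and cannot be recovered here. Below is a minimal sketch for checking that a locally downloaded blob matches its pointer; the file paths are hypothetical placeholders.

```python
# Sketch: verify a downloaded blob against its Git LFS pointer file.
# Paths are hypothetical placeholders, not files in this repository layout.
import hashlib

def parse_lfs_pointer(pointer_path):
    """Read the three-line LFS pointer: version, oid sha256:<hex>, size <bytes>."""
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields["oid"].removeprefix("sha256:"), int(fields["size"])

def verify(blob_path, pointer_path):
    expected_oid, expected_size = parse_lfs_pointer(pointer_path)
    h, size = hashlib.sha256(), 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == expected_oid and size == expected_size

# e.g. verify("optimizer.pt", "optimizer.pt.pointer")  # hypothetical paths
```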
last-checkpoint/pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b873f46f3287d785fe9e0e65cbecdf06f727c84e188245367c48b687b9339708
 size 6173655480
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:c516e23ac00a4fed94805529a2f025da998c6366d20bf4032bb8f1a2f5ae0ce3
+size 14511
last-checkpoint/scaler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c19297e7f4fe97d7584a052fffbfde0e921e6770e0bba0e78fc588359f317245
 size 557
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:52755b2d249438bf866dbd6a565d7c609e276075f6ad3cb63ef2a63e3670f1cd
 size 627
last-checkpoint/trainer_state.json CHANGED
@@ -1,8 +1,8 @@
 {
 "best_metric": 0.11536111854119403,
 "best_model_checkpoint": "/storage/elsayedissa/whisper-large-v2-spanish/checkpoint-1000",
-"epoch": 0.
-"global_step":
+"epoch": 0.4512478739282863,
+"global_step": 13000,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -2994,11 +2994,260 @@
 "eval_steps_per_second": 0.063,
 "eval_wer": 0.09824881201229761,
 "step": 12000
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.300000000000001e-06,
+"loss": 0.1843,
+"step": 12025
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.2897959183673476e-06,
+"loss": 0.155,
+"step": 12050
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.279591836734694e-06,
+"loss": 0.1799,
+"step": 12075
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.269387755102041e-06,
+"loss": 0.1632,
+"step": 12100
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.259183673469388e-06,
+"loss": 0.172,
+"step": 12125
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.248979591836735e-06,
+"loss": 0.17,
+"step": 12150
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.238775510204082e-06,
+"loss": 0.1863,
+"step": 12175
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.22857142857143e-06,
+"loss": 0.1611,
+"step": 12200
+},
+{
+"epoch": 0.42,
+"learning_rate": 5.218367346938777e-06,
+"loss": 0.1737,
+"step": 12225
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.208163265306123e-06,
+"loss": 0.164,
+"step": 12250
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.19795918367347e-06,
+"loss": 0.1639,
+"step": 12275
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.1877551020408165e-06,
+"loss": 0.1428,
+"step": 12300
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.177551020408164e-06,
+"loss": 0.1688,
+"step": 12325
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.167346938775511e-06,
+"loss": 0.155,
+"step": 12350
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.157142857142857e-06,
+"loss": 0.1702,
+"step": 12375
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.146938775510204e-06,
+"loss": 0.1409,
+"step": 12400
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.136734693877552e-06,
+"loss": 0.1677,
+"step": 12425
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.126530612244899e-06,
+"loss": 0.139,
+"step": 12450
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.1163265306122455e-06,
+"loss": 0.184,
+"step": 12475
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.106122448979592e-06,
+"loss": 0.1452,
+"step": 12500
+},
+{
+"epoch": 0.43,
+"learning_rate": 5.09591836734694e-06,
+"loss": 0.1651,
+"step": 12525
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.085714285714286e-06,
+"loss": 0.1628,
+"step": 12550
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.075510204081633e-06,
+"loss": 0.181,
+"step": 12575
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.0653061224489795e-06,
+"loss": 0.1515,
+"step": 12600
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.055102040816326e-06,
+"loss": 0.1748,
+"step": 12625
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.0448979591836745e-06,
+"loss": 0.1504,
+"step": 12650
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.034693877551021e-06,
+"loss": 0.1696,
+"step": 12675
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.024489795918368e-06,
+"loss": 0.1604,
+"step": 12700
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.014285714285715e-06,
+"loss": 0.1779,
+"step": 12725
+},
+{
+"epoch": 0.44,
+"learning_rate": 5.004081632653062e-06,
+"loss": 0.1646,
+"step": 12750
+},
+{
+"epoch": 0.44,
+"learning_rate": 4.9938775510204086e-06,
+"loss": 0.1556,
+"step": 12775
+},
+{
+"epoch": 0.44,
+"learning_rate": 4.983673469387755e-06,
+"loss": 0.1498,
+"step": 12800
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.973469387755103e-06,
+"loss": 0.1715,
+"step": 12825
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.963265306122449e-06,
+"loss": 0.1219,
+"step": 12850
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.953061224489796e-06,
+"loss": 0.1666,
+"step": 12875
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.9428571428571435e-06,
+"loss": 0.1476,
+"step": 12900
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.93265306122449e-06,
+"loss": 0.1741,
+"step": 12925
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.9224489795918376e-06,
+"loss": 0.1579,
+"step": 12950
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.912244897959184e-06,
+"loss": 0.164,
+"step": 12975
+},
+{
+"epoch": 0.45,
+"learning_rate": 4.902040816326531e-06,
+"loss": 0.1612,
+"step": 13000
+},
+{
+"epoch": 0.45,
+"eval_loss": 0.17242176830768585,
+"eval_runtime": 31404.3519,
+"eval_samples_per_second": 0.988,
+"eval_steps_per_second": 0.062,
+"eval_wer": 0.09758505793813547,
+"step": 13000
 }
 ],
 "max_steps": 25000,
 "num_train_epochs": 1,
-"total_flos": 4.
+"total_flos": 4.416195723264e+20,
 "trial_name": null,
 "trial_params": null
 }
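trainer_state.json is plain JSON, so the new log entries above can be summarized directly after downloading the file. A small sketch follows; the local path is a placeholder, and "log_history" is the standard Trainer field that the step entries shown above belong to (the other key names appear verbatim in the diff).

```python
# Sketch: summarize training progress from a downloaded trainer_state.json.
import json

with open("trainer_state.json") as f:   # hypothetical local path
    state = json.load(f)

train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_wer" in e]

print("global_step:", state["global_step"], "epoch:", round(state["epoch"], 3))
print("latest train loss:", train_logs[-1]["loss"], "at step", train_logs[-1]["step"])
print("latest eval WER:", eval_logs[-1]["eval_wer"])   # 0.0976 at step 13000 in this commit
print("best metric so far:", state["best_metric"])     # 0.1154, recorded at checkpoint-1000
```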
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b873f46f3287d785fe9e0e65cbecdf06f727c84e188245367c48b687b9339708
 size 6173655480
runs/Dec26_19-57-19_gpu07.cyverse.org/events.out.tfevents.1672109970.gpu07.cyverse.org.126369.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:6a3ad336968cb511fe9ae780cd83e8101970e1c047eb76f8596c5e84f34771a2
+size 90163
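The updated events file under runs/ carries the same metrics in TensorBoard format. A minimal sketch for reading it offline with the tensorboard package; the scalar tag names are not visible in this diff, so the sketch lists whatever tags the file actually contains instead of assuming them.

```python
# Sketch: inspect the pushed TensorBoard event file offline.
# Point the accumulator at the downloaded runs/ directory (path shown in the diff).
from tensorboard.backend.event_processing import event_accumulator

ea = event_accumulator.EventAccumulator("runs/Dec26_19-57-19_gpu07.cyverse.org")
ea.Reload()

print(ea.Tags()["scalars"])                 # discover the actual scalar tags first
for tag in ea.Tags()["scalars"]:
    events = ea.Scalars(tag)
    print(tag, "last value:", events[-1].value, "at step", events[-1].step)
```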